From d01fa927ca94c0d6c56abe3a07cc837a6f21d9a7 Mon Sep 17 00:00:00 2001 From: Young Bin Lee Date: Mon, 26 Jun 2023 13:15:39 +0900 Subject: [PATCH 01/21] Move files --- Sources/Aespa/{ => Core}/Context/AespaPhotoContext.swift | 0 Sources/Aespa/{ => Core}/Context/AespaVideoContext.swift | 0 Sources/Aespa/{ => Core}/Context/Context.swift | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename Sources/Aespa/{ => Core}/Context/AespaPhotoContext.swift (100%) rename Sources/Aespa/{ => Core}/Context/AespaVideoContext.swift (100%) rename Sources/Aespa/{ => Core}/Context/Context.swift (100%) diff --git a/Sources/Aespa/Context/AespaPhotoContext.swift b/Sources/Aespa/Core/Context/AespaPhotoContext.swift similarity index 100% rename from Sources/Aespa/Context/AespaPhotoContext.swift rename to Sources/Aespa/Core/Context/AespaPhotoContext.swift diff --git a/Sources/Aespa/Context/AespaVideoContext.swift b/Sources/Aespa/Core/Context/AespaVideoContext.swift similarity index 100% rename from Sources/Aespa/Context/AespaVideoContext.swift rename to Sources/Aespa/Core/Context/AespaVideoContext.swift diff --git a/Sources/Aespa/Context/Context.swift b/Sources/Aespa/Core/Context/Context.swift similarity index 100% rename from Sources/Aespa/Context/Context.swift rename to Sources/Aespa/Core/Context/Context.swift From d582137250737830f7d5b72855b25a89fea35a3a Mon Sep 17 00:00:00 2001 From: Young Bin Lee Date: Mon, 26 Jun 2023 19:14:21 +0900 Subject: [PATCH 02/21] Add more capabilities for SwiftUI preview --- Sources/Aespa/AespaSession.swift | 10 ++- .../Core/Context/AespaPhotoContext.swift | 3 +- .../Core/Context/AespaVideoContext.swift | 3 +- Sources/Aespa/Core/Context/Context.swift | 13 ++-- .../AVCaptureDevice+AespaRepresentable.swift | 7 +- .../Aespa/Tuner/Device/AutoFocusTuner.swift | 3 +- .../Util/Extension/SwiftUI+Extension.swift | 69 +++++++++++++++++-- 7 files changed, 86 insertions(+), 22 deletions(-) diff --git a/Sources/Aespa/AespaSession.swift 
b/Sources/Aespa/AespaSession.swift index 6d0689f..ab84894 100644 --- a/Sources/Aespa/AespaSession.swift +++ b/Sources/Aespa/AespaSession.swift @@ -127,6 +127,12 @@ open class AespaSession { return connection.videoOrientation } + /// This property reflects the device's current position. + public var currentCameraPosition: AVCaptureDevice.Position? { + guard let device = coreSession.videoDeviceInput?.device else { return nil } + return device.position + } + /// This publisher is responsible for emitting updates to the preview layer. /// /// A log message is printed to the console every time a new layer is pushed. @@ -199,8 +205,8 @@ extension AespaSession: CommonContext { } @discardableResult - public func setAutofocusingWithError(mode: AVCaptureDevice.FocusMode) throws -> AespaSession { - let tuner = AutoFocusTuner(mode: mode) + public func setFocusWithError(mode: AVCaptureDevice.FocusMode, point: CGPoint? = nil) throws -> AespaSession { + let tuner = AutoFocusTuner(mode: mode, point: point) try coreSession.run(tuner) return self } diff --git a/Sources/Aespa/Core/Context/AespaPhotoContext.swift b/Sources/Aespa/Core/Context/AespaPhotoContext.swift index 590058d..e7c9d63 100644 --- a/Sources/Aespa/Core/Context/AespaPhotoContext.swift +++ b/Sources/Aespa/Core/Context/AespaPhotoContext.swift @@ -42,8 +42,7 @@ open class AespaPhotoContext { if let firstPhotoFile = fileManager.fetchPhoto( albumName: option.asset.albumName, subDirectoryName: option.asset.photoDirectoryName, - count: 1).first - { + count: 1).first { photoFileBufferSubject.send(.success(firstPhotoFile)) } } diff --git a/Sources/Aespa/Core/Context/AespaVideoContext.swift b/Sources/Aespa/Core/Context/AespaVideoContext.swift index dfdab58..1b5ae50 100644 --- a/Sources/Aespa/Core/Context/AespaVideoContext.swift +++ b/Sources/Aespa/Core/Context/AespaVideoContext.swift @@ -49,8 +49,7 @@ public class AespaVideoContext { if let firstVideoFile = fileManager.fetchVideo( albumName: option.asset.albumName, 
subDirectoryName: option.asset.videoDirectoryName, - count: 1).first - { + count: 1).first { videoFileBufferSubject.send(.success(firstVideoFile)) } } diff --git a/Sources/Aespa/Core/Context/Context.swift b/Sources/Aespa/Core/Context/Context.swift index 399cf00..4b1c09f 100644 --- a/Sources/Aespa/Core/Context/Context.swift +++ b/Sources/Aespa/Core/Context/Context.swift @@ -56,7 +56,7 @@ public protocol CommonContext { /// - Throws: `AespaError` if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func setAutofocusingWithError(mode: AVCaptureDevice.FocusMode) throws -> CommonContextType + @discardableResult func setFocusWithError(mode: AVCaptureDevice.FocusMode, point: CGPoint?) throws -> CommonContextType /// Sets the zoom factor for the video recording session. /// @@ -158,12 +158,13 @@ extension CommonContext { /// /// - Returns: `AespaVideoContext`, for chaining calls. @discardableResult - public func setAutofocusing( + public func setFocus( mode: AVCaptureDevice.FocusMode, + point: CGPoint? = nil, errorHandler: ErrorHandler? = nil ) -> CommonContextType { do { - return try self.setAutofocusingWithError(mode: mode) + return try self.setFocusWithError(mode: mode, point: point) } catch let error { errorHandler?(error) Logger.log(error: error) // Logs any errors encountered during the operation @@ -210,7 +211,6 @@ extension CommonContext { } } - public protocol VideoContext { associatedtype VideoContextType: VideoContext @@ -394,7 +394,6 @@ extension VideoContext { return underlyingVideoContext } - /// Sets the torch mode and level for the video recording session. /// @@ -437,7 +436,6 @@ extension VideoContext { } } - public protocol PhotoContext { associatedtype PhotoContextType: PhotoContext @@ -503,7 +501,8 @@ public protocol PhotoContext { // MARK: Non-throwing methods // These methods encapsulate error handling within the method itself rather than propagating it to the caller. 
// This means any errors that occur during the execution of these methods will be caught and logged, not thrown. -// Although it simplifies error handling, this approach may not be recommended because it offers less control to callers. +// Although it simplifies error handling, this approach may not be recommended because +// it offers less control to callers. // Developers are encouraged to use methods that throw errors, to gain finer control over error handling. extension PhotoContext { /// Asynchronously captures a photo using the specified `AVCapturePhotoSettings`. diff --git a/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift b/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift index 8cc66e9..cec5715 100644 --- a/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift +++ b/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift @@ -19,7 +19,7 @@ protocol AespaCaptureDeviceRepresentable { func isFocusModeSupported(_ focusMode: AVCaptureDevice.FocusMode) -> Bool func setZoomFactor(_ factor: CGFloat) - func setFocusMode(_ focusMode: AVCaptureDevice.FocusMode) + func setFocusMode(_ focusMode: AVCaptureDevice.FocusMode, point: CGPoint?) func setTorchMode(_ torchMode: AVCaptureDevice.TorchMode) func setTorchModeOn(level torchLevel: Float) throws } @@ -38,8 +38,11 @@ extension AVCaptureDevice: AespaCaptureDeviceRepresentable { } } - func setFocusMode(_ focusMode: FocusMode) { + func setFocusMode(_ focusMode: AVCaptureDevice.FocusMode, point: CGPoint?) 
{ self.focusMode = focusMode + if let point { + self.focusPointOfInterest = point + } } func setZoomFactor(_ factor: CGFloat) { diff --git a/Sources/Aespa/Tuner/Device/AutoFocusTuner.swift b/Sources/Aespa/Tuner/Device/AutoFocusTuner.swift index 2b04f43..dbce4b3 100644 --- a/Sources/Aespa/Tuner/Device/AutoFocusTuner.swift +++ b/Sources/Aespa/Tuner/Device/AutoFocusTuner.swift @@ -11,12 +11,13 @@ import AVFoundation struct AutoFocusTuner: AespaDeviceTuning { let needLock = true let mode: AVCaptureDevice.FocusMode + let point: CGPoint? func tune(_ device: T) throws { guard device.isFocusModeSupported(mode) else { throw AespaError.device(reason: .unsupported) } - device.setFocusMode(mode) + device.setFocusMode(mode, point: point) } } diff --git a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift index ae8309b..4de4360 100644 --- a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift +++ b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift @@ -16,21 +16,78 @@ public extension AespaSession { /// .resizeAspectFill` by default. 
/// /// - Returns: `some UIViewRepresentable` which can coordinate other `View` components - func preview(gravity: AVLayerVideoGravity = .resizeAspectFill) -> some UIViewControllerRepresentable { - Preview(of: previewLayer, gravity: gravity) + func preview( + gravity: AVLayerVideoGravity = .resizeAspectFill, + startPosition position: AVCaptureDevice.Position = .front, + preferredFocusMode mode: AVCaptureDevice.FocusMode = .continuousAutoFocus + ) -> some View { + let internalPreview = Preview(of: self, gravity: gravity) + return InteractivePreview(internalPreview, startPosition: position, preferredFocusMode: mode) } } -private struct Preview: UIViewControllerRepresentable { - let previewLayer: AVCaptureVideoPreviewLayer +public struct InteractivePreview: View { + private let preview: Preview + private let preferredFocusMode: AVCaptureDevice.FocusMode + @State private var cameraPosition: AVCaptureDevice.Position + + @GestureState private var magnification: CGFloat = 1.0 + + init( + _ preview: Preview, + startPosition: AVCaptureDevice.Position, + preferredFocusMode: AVCaptureDevice.FocusMode + ) { + self.preview = preview + self.cameraPosition = startPosition + self.preferredFocusMode = preferredFocusMode + } + + var session: AespaSession { + preview.session + } + + var layer: AVCaptureVideoPreviewLayer { + preview.previewLayer + } + + var currentFocusMode: AVCaptureDevice.FocusMode { + session.currentFocusMode ?? preferredFocusMode + } + + public var body: some View { + preview + .gesture(DragGesture(minimumDistance: 0) + .onChanged { value in + if currentFocusMode == .autoFocus { + session.setFocus(mode: .autoFocus, point: value.location) + } + }) + .onTapGesture(count: 2) { + let nextPosition: AVCaptureDevice.Position = cameraPosition == .back ? 
.front : .back + session.setPosition(to: nextPosition) + } + .gesture(MagnificationGesture() + .updating($magnification) { currentState, gestureState, _ in + gestureState = currentState + session.zoom(factor: gestureState) + } + ) + } +} + +struct Preview: UIViewControllerRepresentable { + let session: AespaSession let gravity: AVLayerVideoGravity + let previewLayer: AVCaptureVideoPreviewLayer init( - of previewLayer: AVCaptureVideoPreviewLayer, + of session: AespaSession, gravity: AVLayerVideoGravity ) { self.gravity = gravity - self.previewLayer = previewLayer + self.session = session + self.previewLayer = AVCaptureVideoPreviewLayer(session: session.avCaptureSession) } func makeUIViewController(context: Context) -> UIViewController { From 7e348a6e07605ea3449773a5bcae699c7191c309 Mon Sep 17 00:00:00 2001 From: enebin Date: Mon, 26 Jun 2023 19:31:03 +0900 Subject: [PATCH 03/21] Update readme --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index c1d3c0e..129f671 100644 --- a/README.md +++ b/README.md @@ -125,10 +125,10 @@ graph LR; | Common | Description | |----------------------------------|------------------------------------------------------------------------------------------------------------------| -| ✨ `zoom` | Modifies the zoom factor. | -| ✨ `setPosition` | Changes the camera position. | +| ✨ `zoom` | Modifies the zoom factor. | +| ✨ `setPosition` | Changes the camera position. | | `setOrientation` | Modifies the orientation. | -| `setAutofocusing` | Alters the autofocusing mode. | +| `setFocus` | Alters the autofocusing mode. | | `setQuality` | Adjusts the video quality preset for the recording session. | | `doctor` | Checks if essential conditions to start recording are satisfied. | | `previewLayerPublisher` | Responsible for emitting updates to the preview layer. 
| From fc72602c102ef56e56abce9f2dc2a5d8113ca3b1 Mon Sep 17 00:00:00 2001 From: enebin Date: Mon, 26 Jun 2023 19:43:42 +0900 Subject: [PATCH 04/21] Internalize demo app --- DemoApp/Aespa-iOS.xcodeproj/project.pbxproj | 406 ++++++++++++++++++ .../contents.xcworkspacedata | 7 + .../xcshareddata/IDEWorkspaceChecks.plist | 8 + .../xcshareddata/xcschemes/Aespa-iOS.xcscheme | 101 +++++ DemoApp/Aespa-iOS/Aespa-iOS.entitlements | 5 + DemoApp/Aespa-iOS/Aespa_iOSApp.swift | 17 + .../AccentColor.colorset/Contents.json | 11 + .../AppIcon.appiconset/Contents.json | 13 + .../Aespa-iOS/Assets.xcassets/Contents.json | 6 + DemoApp/Aespa-iOS/GalleryView.swift | 78 ++++ .../Preview Assets.xcassets/Contents.json | 6 + DemoApp/Aespa-iOS/SettingView.swift | 97 +++++ DemoApp/Aespa-iOS/VideoContentView.swift | 162 +++++++ DemoApp/Aespa-iOS/VideoContentViewModel.swift | 117 +++++ DemoApp/Package.swift | 3 + 15 files changed, 1037 insertions(+) create mode 100644 DemoApp/Aespa-iOS.xcodeproj/project.pbxproj create mode 100644 DemoApp/Aespa-iOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata create mode 100644 DemoApp/Aespa-iOS.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist create mode 100644 DemoApp/Aespa-iOS.xcodeproj/xcshareddata/xcschemes/Aespa-iOS.xcscheme create mode 100644 DemoApp/Aespa-iOS/Aespa-iOS.entitlements create mode 100644 DemoApp/Aespa-iOS/Aespa_iOSApp.swift create mode 100644 DemoApp/Aespa-iOS/Assets.xcassets/AccentColor.colorset/Contents.json create mode 100644 DemoApp/Aespa-iOS/Assets.xcassets/AppIcon.appiconset/Contents.json create mode 100644 DemoApp/Aespa-iOS/Assets.xcassets/Contents.json create mode 100644 DemoApp/Aespa-iOS/GalleryView.swift create mode 100644 DemoApp/Aespa-iOS/Preview Content/Preview Assets.xcassets/Contents.json create mode 100644 DemoApp/Aespa-iOS/SettingView.swift create mode 100644 DemoApp/Aespa-iOS/VideoContentView.swift create mode 100644 DemoApp/Aespa-iOS/VideoContentViewModel.swift create mode 100644 
DemoApp/Package.swift diff --git a/DemoApp/Aespa-iOS.xcodeproj/project.pbxproj b/DemoApp/Aespa-iOS.xcodeproj/project.pbxproj new file mode 100644 index 0000000..bed7218 --- /dev/null +++ b/DemoApp/Aespa-iOS.xcodeproj/project.pbxproj @@ -0,0 +1,406 @@ +// !$*UTF8*$! +{ + archiveVersion = 1; + classes = { + }; + objectVersion = 56; + objects = { + +/* Begin PBXBuildFile section */ + 0716FED02A37326600B5AA1B /* GalleryView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0716FECF2A37326600B5AA1B /* GalleryView.swift */; }; + 0716FED42A3737D700B5AA1B /* Aespa in Frameworks */ = {isa = PBXBuildFile; productRef = 0716FED32A3737D700B5AA1B /* Aespa */; }; + 07778FF12A31E3A000B1DC6C /* SettingView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 07778FF02A31E3A000B1DC6C /* SettingView.swift */; }; + 9CE5B7DC2A306F350058334D /* Aespa_iOSApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9CE5B7DB2A306F350058334D /* Aespa_iOSApp.swift */; }; + 9CE5B7DE2A306F350058334D /* VideoContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9CE5B7DD2A306F350058334D /* VideoContentView.swift */; }; + 9CE5B7E02A306F370058334D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 9CE5B7DF2A306F370058334D /* Assets.xcassets */; }; + 9CE5B7E32A306F370058334D /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 9CE5B7E22A306F370058334D /* Preview Assets.xcassets */; }; + 9CE5B80B2A3070380058334D /* VideoContentViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9CE5B80A2A3070380058334D /* VideoContentViewModel.swift */; }; +/* End PBXBuildFile section */ + +/* Begin PBXFileReference section */ + 0716FECF2A37326600B5AA1B /* GalleryView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GalleryView.swift; sourceTree = ""; }; + 07778FF02A31E3A000B1DC6C /* SettingView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SettingView.swift; sourceTree = 
""; }; + 07F81CEE2A49A21B00DEDB04 /* Aespa */ = {isa = PBXFileReference; lastKnownFileType = wrapper; name = Aespa; path = ..; sourceTree = ""; }; + 9CE5B7D82A306F350058334D /* Aespa-iOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Aespa-iOS.app"; sourceTree = BUILT_PRODUCTS_DIR; }; + 9CE5B7DB2A306F350058334D /* Aespa_iOSApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Aespa_iOSApp.swift; sourceTree = ""; }; + 9CE5B7DD2A306F350058334D /* VideoContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoContentView.swift; sourceTree = ""; }; + 9CE5B7DF2A306F370058334D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 9CE5B7E22A306F370058334D /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; + 9CE5B80A2A3070380058334D /* VideoContentViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoContentViewModel.swift; sourceTree = ""; }; + 9CE5B80E2A316BC00058334D /* Aespa-iOS.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = "Aespa-iOS.entitlements"; sourceTree = ""; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + 9CE5B7D52A306F350058334D /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + 0716FED42A3737D700B5AA1B /* Aespa in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 07F81CED2A49A21B00DEDB04 /* Packages */ = { + isa = PBXGroup; + children = ( + 07F81CEE2A49A21B00DEDB04 /* Aespa */, + ); + name = Packages; + sourceTree = ""; + }; + 9CE5B7CF2A306F350058334D = { + isa = PBXGroup; + children = ( 
+ 07F81CED2A49A21B00DEDB04 /* Packages */, + 9CE5B7DA2A306F350058334D /* Aespa-iOS */, + 9CE5B7D92A306F350058334D /* Products */, + 9CE5B8072A3070200058334D /* Frameworks */, + ); + sourceTree = ""; + }; + 9CE5B7D92A306F350058334D /* Products */ = { + isa = PBXGroup; + children = ( + 9CE5B7D82A306F350058334D /* Aespa-iOS.app */, + ); + name = Products; + sourceTree = ""; + }; + 9CE5B7DA2A306F350058334D /* Aespa-iOS */ = { + isa = PBXGroup; + children = ( + 9CE5B80E2A316BC00058334D /* Aespa-iOS.entitlements */, + 9CE5B7DB2A306F350058334D /* Aespa_iOSApp.swift */, + 9CE5B7DD2A306F350058334D /* VideoContentView.swift */, + 0716FECF2A37326600B5AA1B /* GalleryView.swift */, + 07778FF02A31E3A000B1DC6C /* SettingView.swift */, + 9CE5B80A2A3070380058334D /* VideoContentViewModel.swift */, + 9CE5B7DF2A306F370058334D /* Assets.xcassets */, + 9CE5B7E12A306F370058334D /* Preview Content */, + ); + path = "Aespa-iOS"; + sourceTree = ""; + }; + 9CE5B7E12A306F370058334D /* Preview Content */ = { + isa = PBXGroup; + children = ( + 9CE5B7E22A306F370058334D /* Preview Assets.xcassets */, + ); + path = "Preview Content"; + sourceTree = ""; + }; + 9CE5B8072A3070200058334D /* Frameworks */ = { + isa = PBXGroup; + children = ( + ); + name = Frameworks; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 9CE5B7D72A306F350058334D /* Aespa-iOS */ = { + isa = PBXNativeTarget; + buildConfigurationList = 9CE5B7FC2A306F370058334D /* Build configuration list for PBXNativeTarget "Aespa-iOS" */; + buildPhases = ( + 9CE5B7D42A306F350058334D /* Sources */, + 9CE5B7D52A306F350058334D /* Frameworks */, + 9CE5B7D62A306F350058334D /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = "Aespa-iOS"; + packageProductDependencies = ( + 0716FED32A3737D700B5AA1B /* Aespa */, + ); + productName = "Aespa-iOS"; + productReference = 9CE5B7D82A306F350058334D /* Aespa-iOS.app */; + productType = "com.apple.product-type.application"; + }; +/* End 
PBXNativeTarget section */ + +/* Begin PBXProject section */ + 9CE5B7D02A306F350058334D /* Project object */ = { + isa = PBXProject; + attributes = { + BuildIndependentTargetsInParallel = 1; + LastSwiftUpdateCheck = 1430; + LastUpgradeCheck = 1430; + TargetAttributes = { + 9CE5B7D72A306F350058334D = { + CreatedOnToolsVersion = 14.3; + }; + }; + }; + buildConfigurationList = 9CE5B7D32A306F350058334D /* Build configuration list for PBXProject "Aespa-iOS" */; + compatibilityVersion = "Xcode 14.0"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 9CE5B7CF2A306F350058334D; + packageReferences = ( + ); + productRefGroup = 9CE5B7D92A306F350058334D /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + 9CE5B7D72A306F350058334D /* Aespa-iOS */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + 9CE5B7D62A306F350058334D /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 9CE5B7E32A306F370058334D /* Preview Assets.xcassets in Resources */, + 9CE5B7E02A306F370058334D /* Assets.xcassets in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + 9CE5B7D42A306F350058334D /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 9CE5B7DE2A306F350058334D /* VideoContentView.swift in Sources */, + 07778FF12A31E3A000B1DC6C /* SettingView.swift in Sources */, + 9CE5B7DC2A306F350058334D /* Aespa_iOSApp.swift in Sources */, + 9CE5B80B2A3070380058334D /* VideoContentViewModel.swift in Sources */, + 0716FED02A37326600B5AA1B /* GalleryView.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin XCBuildConfiguration section */ + 9CE5B7FA2A306F370058334D /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + 
ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 14.0; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + 
9CE5B7FB2A306F370058334D /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 14.0; + MTL_ENABLE_DEBUG_INFO = NO; + MTL_FAST_MATH = YES; + SDKROOT = iphoneos; + SWIFT_COMPILATION_MODE = wholemodule; + SWIFT_OPTIMIZATION_LEVEL = "-O"; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 9CE5B7FD2A306F370058334D /* Debug */ = { + 
isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_ENTITLEMENTS = "Aespa-iOS/Aespa-iOS.entitlements"; + CODE_SIGN_IDENTITY = "Apple Development"; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_ASSET_PATHS = "\"Aespa-iOS/Preview Content\""; + DEVELOPMENT_TEAM = W6QHM4Y43Z; + ENABLE_PREVIEWS = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_NSAppleMusicUsageDescription = ""; + INFOPLIST_KEY_NSCameraUsageDescription = "Use camera"; + INFOPLIST_KEY_NSMicrophoneUsageDescription = "Use mic"; + INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = "Use and add album"; + INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "Use album"; + INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; + INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchScreen_Generation = YES; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "co.enebin.Aespa-iOS"; + PRODUCT_NAME = "$(TARGET_NAME)"; + PROVISIONING_PROFILE_SPECIFIER = ""; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + 9CE5B7FE2A306F370058334D /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_ENTITLEMENTS = "Aespa-iOS/Aespa-iOS.entitlements"; + CODE_SIGN_IDENTITY = "Apple Development"; + 
CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_ASSET_PATHS = "\"Aespa-iOS/Preview Content\""; + DEVELOPMENT_TEAM = W6QHM4Y43Z; + ENABLE_PREVIEWS = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_NSAppleMusicUsageDescription = ""; + INFOPLIST_KEY_NSCameraUsageDescription = "Use camera"; + INFOPLIST_KEY_NSMicrophoneUsageDescription = "Use mic"; + INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = "Use and add album"; + INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "Use album"; + INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; + INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchScreen_Generation = YES; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "co.enebin.Aespa-iOS"; + PRODUCT_NAME = "$(TARGET_NAME)"; + PROVISIONING_PROFILE_SPECIFIER = ""; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + 9CE5B7D32A306F350058334D /* Build configuration list for PBXProject "Aespa-iOS" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 9CE5B7FA2A306F370058334D /* Debug */, + 9CE5B7FB2A306F370058334D /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 9CE5B7FC2A306F370058334D /* Build configuration list for PBXNativeTarget "Aespa-iOS" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 9CE5B7FD2A306F370058334D /* Debug */, + 
9CE5B7FE2A306F370058334D /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + +/* Begin XCSwiftPackageProductDependency section */ + 0716FED32A3737D700B5AA1B /* Aespa */ = { + isa = XCSwiftPackageProductDependency; + productName = Aespa; + }; +/* End XCSwiftPackageProductDependency section */ + }; + rootObject = 9CE5B7D02A306F350058334D /* Project object */; +} diff --git a/DemoApp/Aespa-iOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/DemoApp/Aespa-iOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata new file mode 100644 index 0000000..919434a --- /dev/null +++ b/DemoApp/Aespa-iOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata @@ -0,0 +1,7 @@ + + + + + diff --git a/DemoApp/Aespa-iOS.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/DemoApp/Aespa-iOS.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist new file mode 100644 index 0000000..18d9810 --- /dev/null +++ b/DemoApp/Aespa-iOS.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist @@ -0,0 +1,8 @@ + + + + + IDEDidComputeMac32BitWarning + + + diff --git a/DemoApp/Aespa-iOS.xcodeproj/xcshareddata/xcschemes/Aespa-iOS.xcscheme b/DemoApp/Aespa-iOS.xcodeproj/xcshareddata/xcschemes/Aespa-iOS.xcscheme new file mode 100644 index 0000000..096fbb8 --- /dev/null +++ b/DemoApp/Aespa-iOS.xcodeproj/xcshareddata/xcschemes/Aespa-iOS.xcscheme @@ -0,0 +1,101 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/DemoApp/Aespa-iOS/Aespa-iOS.entitlements b/DemoApp/Aespa-iOS/Aespa-iOS.entitlements new file mode 100644 index 0000000..0c67376 --- /dev/null +++ b/DemoApp/Aespa-iOS/Aespa-iOS.entitlements @@ -0,0 +1,5 @@ + + + + + diff --git a/DemoApp/Aespa-iOS/Aespa_iOSApp.swift b/DemoApp/Aespa-iOS/Aespa_iOSApp.swift new file mode 100644 index 0000000..1f8b392 --- /dev/null +++ b/DemoApp/Aespa-iOS/Aespa_iOSApp.swift @@ -0,0 +1,17 @@ +// 
+// Aespa_iOSApp.swift +// Aespa-iOS +// +// Created by 이영빈 on 2023/06/07. +// + +import SwiftUI + +@main +struct Aespa_iOSApp: App { + var body: some Scene { + WindowGroup { + VideoContentView() + } + } +} diff --git a/DemoApp/Aespa-iOS/Assets.xcassets/AccentColor.colorset/Contents.json b/DemoApp/Aespa-iOS/Assets.xcassets/AccentColor.colorset/Contents.json new file mode 100644 index 0000000..eb87897 --- /dev/null +++ b/DemoApp/Aespa-iOS/Assets.xcassets/AccentColor.colorset/Contents.json @@ -0,0 +1,11 @@ +{ + "colors" : [ + { + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/DemoApp/Aespa-iOS/Assets.xcassets/AppIcon.appiconset/Contents.json b/DemoApp/Aespa-iOS/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 0000000..13613e3 --- /dev/null +++ b/DemoApp/Aespa-iOS/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,13 @@ +{ + "images" : [ + { + "idiom" : "universal", + "platform" : "ios", + "size" : "1024x1024" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/DemoApp/Aespa-iOS/Assets.xcassets/Contents.json b/DemoApp/Aespa-iOS/Assets.xcassets/Contents.json new file mode 100644 index 0000000..73c0059 --- /dev/null +++ b/DemoApp/Aespa-iOS/Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/DemoApp/Aespa-iOS/GalleryView.swift b/DemoApp/Aespa-iOS/GalleryView.swift new file mode 100644 index 0000000..fe4cfbb --- /dev/null +++ b/DemoApp/Aespa-iOS/GalleryView.swift @@ -0,0 +1,78 @@ +// +// GalleryView.swift +// Aespa-iOS +// +// Created by 이영빈 on 2023/06/12. 
+// + +import Aespa +import SwiftUI + +struct GalleryView: View { + @ObservedObject var viewModel: VideoContentViewModel + + @Binding private var mediaType: MediaType + + init( + mediaType: Binding, + contentViewModel viewModel: VideoContentViewModel + ) { + self._mediaType = mediaType + self.viewModel = viewModel + } + + var body: some View { + VStack(alignment: .center) { + Picker("File", selection: $mediaType) { + Text("Video").tag(MediaType.video) + Text("Photo").tag(MediaType.photo) + } + .pickerStyle(.segmented) + .frame(width: 200) + .padding(.vertical) + + ScrollView { + switch mediaType { + case .photo: + LazyVGrid( + columns: [GridItem(.flexible()), GridItem(.flexible()), GridItem(.flexible())], + spacing: 5 + ) { + ForEach(viewModel.photoFiles) { file in + let image = Image(uiImage: file.thumbnail ?? UIImage()) + + image + .resizable() + .scaledToFill() + } + } + .onAppear { + viewModel.fetchPhotoFiles() + } + case .video: + LazyVGrid( + columns: [GridItem(.flexible()), GridItem(.flexible()), GridItem(.flexible())], + spacing: 5 + ) { + ForEach(viewModel.videoFiles) { file in + let image = Image(uiImage: file.thumbnail ?? 
UIImage()) + + image + .resizable() + .scaledToFill() + } + } + .onAppear { + viewModel.fetchVideoFiles() + } + } + } + } + } +} + +struct GalleryView_Previews: PreviewProvider { + static var previews: some View { + GalleryView(mediaType: .constant(.video), contentViewModel: .init()) + } +} diff --git a/DemoApp/Aespa-iOS/Preview Content/Preview Assets.xcassets/Contents.json b/DemoApp/Aespa-iOS/Preview Content/Preview Assets.xcassets/Contents.json new file mode 100644 index 0000000..73c0059 --- /dev/null +++ b/DemoApp/Aespa-iOS/Preview Content/Preview Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/DemoApp/Aespa-iOS/SettingView.swift b/DemoApp/Aespa-iOS/SettingView.swift new file mode 100644 index 0000000..02a1e0a --- /dev/null +++ b/DemoApp/Aespa-iOS/SettingView.swift @@ -0,0 +1,97 @@ +// +// SettingView.swift +// Aespa-iOS +// +// Created by Young Bin on 2023/06/08. +// + +import SwiftUI +import AVFoundation + +struct SettingView: View { + @ObservedObject var viewModel: VideoContentViewModel + + @State private var quality: AVCaptureSession.Preset + @State private var focusMode: AVCaptureDevice.FocusMode + + @State private var isMuted: Bool + + @State private var flashMode: AVCaptureDevice.FlashMode + + init(contentViewModel viewModel: VideoContentViewModel) { + self.viewModel = viewModel + + self.quality = viewModel.aespaSession.avCaptureSession.sessionPreset + self.focusMode = viewModel.aespaSession.currentFocusMode ?? 
.continuousAutoFocus + + self.isMuted = viewModel.aespaSession.isMuted + + self.flashMode = viewModel.aespaSession.currentSetting.flashMode + } + + var body: some View { + List { + Section(header: Text("Common")) { + Picker("Quality", selection: $quality) { + Text("Low").tag(AVCaptureSession.Preset.low) + Text("Medium").tag(AVCaptureSession.Preset.medium) + Text("High").tag(AVCaptureSession.Preset.high) + } + .modifier(TitledPicker(title: "Asset quality")) + .onChange(of: quality) { newValue in + viewModel.aespaSession.setQuality(to: newValue) + } + + Picker("Focus", selection: $focusMode) { + Text("Auto").tag(AVCaptureDevice.FocusMode.autoFocus) + Text("Locked").tag(AVCaptureDevice.FocusMode.locked) + Text("Continuous").tag(AVCaptureDevice.FocusMode.continuousAutoFocus) + } + .modifier(TitledPicker(title: "Focus mode")) + .onChange(of: focusMode) { newValue in + viewModel.aespaSession.setFocus(mode: newValue) + } + } + + Section(header: Text("Video")) { + Picker("Mute", selection: $isMuted) { + Text("Unmute").tag(false) + Text("Mute").tag(true) + } + .modifier(TitledPicker(title: "Mute")) + .onChange(of: isMuted) { newValue in + _ = newValue ? 
+ viewModel.aespaSession.mute() : + viewModel.aespaSession.unmute() + } + } + + Section(header: Text("Photo")) { + Picker("Flash", selection: $flashMode) { + Text("On").tag(AVCaptureDevice.FlashMode.on) + Text("Off").tag(AVCaptureDevice.FlashMode.off) + Text("Auto").tag(AVCaptureDevice.FlashMode.auto) + } + .modifier(TitledPicker(title: "Flash mode")) + .onChange(of: flashMode) { newValue in + viewModel.aespaSession.setFlashMode(to: newValue) + } + } + } + } + + struct TitledPicker: ViewModifier { + let title: String + func body(content: Content) -> some View { + VStack(alignment: .leading) { + Text(title) + .foregroundColor(.gray) + .font(.caption) + + content + .pickerStyle(.segmented) + .frame(height: 40) + } + } + } +} diff --git a/DemoApp/Aespa-iOS/VideoContentView.swift b/DemoApp/Aespa-iOS/VideoContentView.swift new file mode 100644 index 0000000..87d0671 --- /dev/null +++ b/DemoApp/Aespa-iOS/VideoContentView.swift @@ -0,0 +1,162 @@ +// +// VideoContentView.swift +// Aespa-iOS +// +// Created by 이영빈 on 2023/06/07. 
+// + +import Aespa +import SwiftUI + +struct VideoContentView: View { + @State var isRecording = false + @State var isFront = false + + @State var showSetting = false + @State var showGallery = false + + @State var captureMode: MediaType = .video + + @ObservedObject private var viewModel = VideoContentViewModel() + + var body: some View { + ZStack { + viewModel.preview + .frame(minWidth: 0, + maxWidth: .infinity, + minHeight: 0, + maxHeight: .infinity) + .edgesIgnoringSafeArea(.all) + + VStack { + ZStack(alignment: .center) { + // Mode change + Picker("Capture Modes", selection: $captureMode) { + Text("Video").tag(MediaType.video) + Text("Photo").tag(MediaType.photo) + } + .pickerStyle(.segmented) + .background(Color.black.opacity(0.7)) + .cornerRadius(8) + .frame(width: 200) + + HStack { + Spacer() + + Button(action: { showSetting = true }) { + Image(systemName: "gear") + .resizable() + .foregroundColor(.white) + .scaledToFit() + .frame(width: 30, height: 30) + + } + .padding(20) + .contentShape(Rectangle()) + } + } + + Spacer() + + ZStack { + HStack { + // Album thumbnail + button + Button(action: { showGallery = true }) { + let coverImage = ( + captureMode == .video + ? viewModel.videoAlbumCover + : viewModel.photoAlbumCover) + ?? Image("") + + roundRectangleShape(with: coverImage, size: 80) + } + .shadow(radius: 5) + .contentShape(Rectangle()) + + Spacer() + + // Position change + button + Button(action: { + viewModel.aespaSession.setPosition(to: isFront ? 
.back : .front) + isFront.toggle() + }) { + Image(systemName: "arrow.triangle.2.circlepath.camera.fill") + .resizable() + .foregroundColor(.white) + .scaledToFit() + .frame(width: 50, height: 50) + .padding(20) + .padding(.trailing, 20) + } + .shadow(radius: 5) + .contentShape(Rectangle()) + } + + // Shutter + button + recordingButtonShape(width: 60).onTapGesture { + switch captureMode { + case .video: + if isRecording { + viewModel.aespaSession.stopRecording() + isRecording = false + } else { + viewModel.aespaSession.startRecording() + isRecording = true + } + case .photo: + viewModel.aespaSession.capturePhoto() + } + } + } + } + } + .sheet(isPresented: $showSetting) { + SettingView(contentViewModel: viewModel) + } + .sheet(isPresented: $showGallery) { + GalleryView(mediaType: $captureMode, contentViewModel: viewModel) + } + } +} + +extension VideoContentView { + @ViewBuilder + func roundRectangleShape(with image: Image, size: CGFloat) -> some View { + image + .resizable() + .scaledToFill() + .frame(width: size, height: size, alignment: .center) + .clipped() + .cornerRadius(10) + .overlay( + RoundedRectangle(cornerRadius: 10) + .stroke(.white, lineWidth: 1) + ) + .padding(20) + } + + @ViewBuilder + func recordingButtonShape(width: CGFloat) -> some View { + ZStack { + Circle() + .strokeBorder(isRecording ? .red : .white, lineWidth: 3) + .frame(width: width) + + Circle() + .fill(isRecording ? .red : .white) + .frame(width: width * 0.8) + } + .frame(height: width) + } +} + +enum MediaType { + case video + case photo +} + +struct VideoContentView_Previews: PreviewProvider { + static var previews: some View { + VideoContentView() + } +} diff --git a/DemoApp/Aespa-iOS/VideoContentViewModel.swift b/DemoApp/Aespa-iOS/VideoContentViewModel.swift new file mode 100644 index 0000000..d0d8712 --- /dev/null +++ b/DemoApp/Aespa-iOS/VideoContentViewModel.swift @@ -0,0 +1,117 @@ +// +// VideoContentViewModel.swift +// Aespa-iOS +// +// Created by 이영빈 on 2023/06/07. 
+// + +import Combine +import SwiftUI +import Foundation + +import Aespa + +class VideoContentViewModel: ObservableObject { + let aespaSession: AespaSession + + var preview: some View { + aespaSession.preview() + } + + private var subscription = Set() + + @Published var videoAlbumCover: Image? + @Published var photoAlbumCover: Image? + + @Published var videoFiles: [VideoFile] = [] + @Published var photoFiles: [PhotoFile] = [] + + init() { + let option = AespaOption(albumName: "Aespa-Demo") + self.aespaSession = Aespa.session(with: option) + + Task(priority: .background) { + do { + try await Aespa.configure() + + // Common setting + aespaSession + .setFocus(mode: .continuousAutoFocus) + .setOrientation(to: .portrait) + .setQuality(to: .high) + .custom(WideColorCameraTuner()) + + // Photo-only setting + aespaSession + .setFlashMode(to: .on) + .redEyeReduction(enabled: true) + + // Video-only setting + aespaSession + .mute() + .setStabilization(mode: .auto) + + // Prepare video album cover + aespaSession.videoFilePublisher + .receive(on: DispatchQueue.main) + .map { result -> Image? in + if case .success(let file) = result { + return file.thumbnailImage + } else { + return nil + } + } + .assign(to: \.videoAlbumCover, on: self) + .store(in: &subscription) + + // Prepare photo album cover + aespaSession.photoFilePublisher + .receive(on: DispatchQueue.main) + .map { result -> Image? 
in + if case .success(let file) = result { + return file.thumbnailImage + } else { + return nil + } + } + .assign(to: \.photoAlbumCover, on: self) + .store(in: &subscription) + + } catch let error { + print(error) + } + } + } + + func fetchVideoFiles() { + // File fetching task can cause low reponsiveness when called from main thread + DispatchQueue.global().async { + let fetchedFiles = self.aespaSession.fetchVideoFiles() + + DispatchQueue.main.async { + self.videoFiles = fetchedFiles + } + } + } + + func fetchPhotoFiles() { + // File fetching task can cause low reponsiveness when called from main thread + DispatchQueue.global().async { + let fetchedFiles = self.aespaSession.fetchPhotoFiles() + + DispatchQueue.main.async { + self.photoFiles = fetchedFiles + } + } + } +} + + +extension VideoContentViewModel { + // Example for using custom session tuner + struct WideColorCameraTuner: AespaSessionTuning { + func tune(_ session: T) throws where T : AespaCoreSessionRepresentable { + session.avCaptureSession.automaticallyConfiguresCaptureDeviceForWideColor = true + } + } +} diff --git a/DemoApp/Package.swift b/DemoApp/Package.swift new file mode 100644 index 0000000..da8e907 --- /dev/null +++ b/DemoApp/Package.swift @@ -0,0 +1,3 @@ +import PackageDescription + +let package = Package() From 3018bf260de4392b81195546e8e7388e9bfeb024 Mon Sep 17 00:00:00 2001 From: enebin Date: Mon, 26 Jun 2023 22:13:22 +0900 Subject: [PATCH 05/21] Improve zooming for interactive preview --- .../Util/Extension/SwiftUI+Extension.swift | 46 ++++++++++++++----- 1 file changed, 35 insertions(+), 11 deletions(-) diff --git a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift index 4de4360..85eaa79 100644 --- a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift +++ b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift @@ -10,13 +10,22 @@ import SwiftUI import AVFoundation public extension AespaSession { + func preview( + gravity: 
AVLayerVideoGravity = .resizeAspectFill, + startPosition position: AVCaptureDevice.Position = .back, + preferredFocusMode mode: AVCaptureDevice.FocusMode = .continuousAutoFocus + ) -> some View { + let internalPreview = Preview(of: self, gravity: gravity) + return InteractivePreview(internalPreview, startPosition: position, preferredFocusMode: mode) + } + /// A `SwiftUI` `View` that you use to display video as it is being captured by an input device. /// /// - Parameter gravity: Define `AVLayerVideoGravity` for preview's orientation. /// .resizeAspectFill` by default. /// /// - Returns: `some UIViewRepresentable` which can coordinate other `View` components - func preview( + func interactivePreview( gravity: AVLayerVideoGravity = .resizeAspectFill, startPosition position: AVCaptureDevice.Position = .front, preferredFocusMode mode: AVCaptureDevice.FocusMode = .continuousAutoFocus @@ -31,8 +40,9 @@ public struct InteractivePreview: View { private let preferredFocusMode: AVCaptureDevice.FocusMode @State private var cameraPosition: AVCaptureDevice.Position - @GestureState private var magnification: CGFloat = 1.0 - + @State private var previousZoomFactor: CGFloat = 1.0 + @State private var currentZoomFactor: CGFloat = 1.0 + init( _ preview: Preview, startPosition: AVCaptureDevice.Position, @@ -55,22 +65,36 @@ public struct InteractivePreview: View { session.currentFocusMode ?? preferredFocusMode } + var currentCameraPosition: AVCaptureDevice.Position { + session.currentCameraPosition ?? cameraPosition + } + public var body: some View { + let maxZoomFactor = session.maxZoomFactor ?? 1.0 + preview + .onTapGesture(count: 2) { + let nextPosition: AVCaptureDevice.Position = (currentCameraPosition == .back) ? 
.front : .back + session.setPosition(to: nextPosition) + cameraPosition = nextPosition + } .gesture(DragGesture(minimumDistance: 0) .onChanged { value in if currentFocusMode == .autoFocus { session.setFocus(mode: .autoFocus, point: value.location) } }) - .onTapGesture(count: 2) { - let nextPosition: AVCaptureDevice.Position = cameraPosition == .back ? .front : .back - session.setPosition(to: nextPosition) - } .gesture(MagnificationGesture() - .updating($magnification) { currentState, gestureState, _ in - gestureState = currentState - session.zoom(factor: gestureState) + .onChanged { (scale) in + let videoZoomFactor = scale * previousZoomFactor + if (videoZoomFactor <= maxZoomFactor) { + let newZoomFactor = max(1.0, min(videoZoomFactor, maxZoomFactor)) + session.zoom(factor: newZoomFactor) + } + } + .onEnded { (scale) in + let videoZoomFactor = scale * previousZoomFactor + previousZoomFactor = videoZoomFactor >= 1 ? videoZoomFactor : 1 } ) } @@ -87,7 +111,7 @@ struct Preview: UIViewControllerRepresentable { ) { self.gravity = gravity self.session = session - self.previewLayer = AVCaptureVideoPreviewLayer(session: session.avCaptureSession) + self.previewLayer = session.previewLayer } func makeUIViewController(context: Context) -> UIViewController { From 78561057e1f532483cfc3bcd9cf44e96540325e3 Mon Sep 17 00:00:00 2001 From: enebin Date: Mon, 26 Jun 2023 22:22:51 +0900 Subject: [PATCH 06/21] Improve focusing --- DemoApp/Aespa-iOS/VideoContentViewModel.swift | 2 +- .../Util/Extension/SwiftUI+Extension.swift | 22 ++++++++----------- 2 files changed, 10 insertions(+), 14 deletions(-) diff --git a/DemoApp/Aespa-iOS/VideoContentViewModel.swift b/DemoApp/Aespa-iOS/VideoContentViewModel.swift index d0d8712..bc3fc6c 100644 --- a/DemoApp/Aespa-iOS/VideoContentViewModel.swift +++ b/DemoApp/Aespa-iOS/VideoContentViewModel.swift @@ -15,7 +15,7 @@ class VideoContentViewModel: ObservableObject { let aespaSession: AespaSession var preview: some View { - aespaSession.preview() + 
aespaSession.interactivePreview() } private var subscription = Set() diff --git a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift index 85eaa79..e563ea6 100644 --- a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift +++ b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift @@ -12,11 +12,9 @@ import AVFoundation public extension AespaSession { func preview( gravity: AVLayerVideoGravity = .resizeAspectFill, - startPosition position: AVCaptureDevice.Position = .back, - preferredFocusMode mode: AVCaptureDevice.FocusMode = .continuousAutoFocus + startPosition position: AVCaptureDevice.Position = .back ) -> some View { - let internalPreview = Preview(of: self, gravity: gravity) - return InteractivePreview(internalPreview, startPosition: position, preferredFocusMode: mode) + return Preview(of: self, gravity: gravity) } /// A `SwiftUI` `View` that you use to display video as it is being captured by an input device. @@ -25,19 +23,19 @@ public extension AespaSession { /// .resizeAspectFill` by default. /// /// - Returns: `some UIViewRepresentable` which can coordinate other `View` components + /// + /// - Warning: Tap-to-focus works only in `autoFocus` mode. Make sure you're using the mode. 
func interactivePreview( gravity: AVLayerVideoGravity = .resizeAspectFill, - startPosition position: AVCaptureDevice.Position = .front, - preferredFocusMode mode: AVCaptureDevice.FocusMode = .continuousAutoFocus + startPosition position: AVCaptureDevice.Position = .front ) -> some View { let internalPreview = Preview(of: self, gravity: gravity) - return InteractivePreview(internalPreview, startPosition: position, preferredFocusMode: mode) + return InteractivePreview(internalPreview, startPosition: position) } } public struct InteractivePreview: View { private let preview: Preview - private let preferredFocusMode: AVCaptureDevice.FocusMode @State private var cameraPosition: AVCaptureDevice.Position @State private var previousZoomFactor: CGFloat = 1.0 @@ -45,12 +43,10 @@ public struct InteractivePreview: View { init( _ preview: Preview, - startPosition: AVCaptureDevice.Position, - preferredFocusMode: AVCaptureDevice.FocusMode + startPosition: AVCaptureDevice.Position ) { self.preview = preview self.cameraPosition = startPosition - self.preferredFocusMode = preferredFocusMode } var session: AespaSession { @@ -62,7 +58,7 @@ public struct InteractivePreview: View { } var currentFocusMode: AVCaptureDevice.FocusMode { - session.currentFocusMode ?? preferredFocusMode + session.currentFocusMode ?? 
.continuousAutoFocus } var currentCameraPosition: AVCaptureDevice.Position { @@ -79,7 +75,7 @@ public struct InteractivePreview: View { cameraPosition = nextPosition } .gesture(DragGesture(minimumDistance: 0) - .onChanged { value in + .onEnded { value in if currentFocusMode == .autoFocus { session.setFocus(mode: .autoFocus, point: value.location) } From eeca530dc24c8472e12da0b65d1989c82724f7ca Mon Sep 17 00:00:00 2001 From: enebin Date: Mon, 26 Jun 2023 22:29:19 +0900 Subject: [PATCH 07/21] Lint and add comments --- .../Core/Context/AespaVideoContext.swift | 3 ++ Sources/Aespa/Core/Context/Context.swift | 35 ++++++++++++++++--- .../Capture/CapturePhotoProcessor.swift | 5 --- .../Util/Extension/SwiftUI+Extension.swift | 22 +++++++++--- 4 files changed, 50 insertions(+), 15 deletions(-) diff --git a/Sources/Aespa/Core/Context/AespaVideoContext.swift b/Sources/Aespa/Core/Context/AespaVideoContext.swift index 1b5ae50..e68f091 100644 --- a/Sources/Aespa/Core/Context/AespaVideoContext.swift +++ b/Sources/Aespa/Core/Context/AespaVideoContext.swift @@ -24,6 +24,7 @@ public class AespaVideoContext { private let videoFileBufferSubject: CurrentValueSubject?, Never> + /// A Boolean value that indicates whether the session is currently recording video. 
public var isRecording: Bool init( @@ -87,6 +88,7 @@ extension AespaVideoContext: VideoContext { } try recorder.startRecording(in: filePath) + isRecording = true } public func stopRecordingWithError() async throws -> VideoFile { @@ -96,6 +98,7 @@ extension AespaVideoContext: VideoContext { try await albumManager.addToAlbum(filePath: videoFilePath) videoFileBufferSubject.send(.success(videoFile)) + isRecording = false return videoFile } diff --git a/Sources/Aespa/Core/Context/Context.swift b/Sources/Aespa/Core/Context/Context.swift index 4b1c09f..c424de5 100644 --- a/Sources/Aespa/Core/Context/Context.swift +++ b/Sources/Aespa/Core/Context/Context.swift @@ -10,11 +10,18 @@ import Combine import Foundation import AVFoundation +/// public typealias ErrorHandler = (Error) -> Void +/// A protocol that defines the common behaviors and properties that all context types must implement. +/// +/// It includes methods to control the quality, position, orientation, and auto-focusing behavior +/// of the session. It also includes the ability to adjust the zoom level of the session. public protocol CommonContext { + /// associatedtype CommonContextType: CommonContext & VideoContext & PhotoContext + /// var underlyingCommonContext: CommonContextType { get } /// Sets the quality preset for the video recording session. @@ -47,7 +54,9 @@ public protocol CommonContext { /// /// - Note: It sets the orientation of the video you are recording, /// not the orientation of the `AVCaptureVideoPreviewLayer`. - @discardableResult func setOrientationWithError(to orientation: AVCaptureVideoOrientation) throws -> CommonContextType + @discardableResult func setOrientationWithError( + to orientation: AVCaptureVideoOrientation + ) throws -> CommonContextType /// Sets the autofocusing mode for the video recording session. /// @@ -56,7 +65,9 @@ public protocol CommonContext { /// - Throws: `AespaError` if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. 
- @discardableResult func setFocusWithError(mode: AVCaptureDevice.FocusMode, point: CGPoint?) throws -> CommonContextType + @discardableResult func setFocusWithError( + mode: AVCaptureDevice.FocusMode, point: CGPoint? + ) throws -> CommonContextType /// Sets the zoom factor for the video recording session. /// @@ -78,7 +89,8 @@ public protocol CommonContext { // MARK: Non-throwing methods // These methods encapsulate error handling within the method itself rather than propagating it to the caller. // This means any errors that occur during the execution of these methods will be caught and logged, not thrown. -// Although it simplifies error handling, this approach may not be recommended because it offers less control to callers. +// Although it simplifies error handling, this approach may not be recommended +// because it offers less control to callers. // Developers are encouraged to use methods that throw errors, to gain finer control over error handling. extension CommonContext { /// Sets the quality preset for the video recording session. @@ -211,11 +223,18 @@ extension CommonContext { } } +/// A protocol that defines the behaviors and properties specific to the video context. +/// +/// It adds video-specific capabilities such as checking if +/// the session is currently recording or muted, and controlling video recording, +/// stabilization, torch mode, and fetching recorded video files. public protocol VideoContext { + /// associatedtype VideoContextType: VideoContext - + /// var underlyingVideoContext: VideoContextType { get } + /// A Boolean value that indicates whether the session is currently recording video. var isRecording: Bool { get } /// This publisher is responsible for emitting `VideoFile` objects resulting from completed recordings. @@ -297,7 +316,8 @@ public protocol VideoContext { // MARK: Non-throwing methods // These methods encapsulate error handling within the method itself rather than propagating it to the caller. 
// This means any errors that occur during the execution of these methods will be caught and logged, not thrown. -// Although it simplifies error handling, this approach may not be recommended because it offers less control to callers. +// Although it simplifies error handling, this approach may not be recommended +// because it offers less control to callers. // Developers are encouraged to use methods that throw errors, to gain finer control over error handling. extension VideoContext { /// Starts the recording of a video session. @@ -436,6 +456,11 @@ extension VideoContext { } } +/// A protocol that defines the behaviors and properties specific to the photo context. +/// +/// It adds photo-specific capabilities such as accessing +/// current photo settings, controlling flash mode, and red-eye reduction, capturing +/// photo, and fetching captured photo files. public protocol PhotoContext { associatedtype PhotoContextType: PhotoContext diff --git a/Sources/Aespa/Processor/Capture/CapturePhotoProcessor.swift b/Sources/Aespa/Processor/Capture/CapturePhotoProcessor.swift index 78f6b47..26f079b 100644 --- a/Sources/Aespa/Processor/Capture/CapturePhotoProcessor.swift +++ b/Sources/Aespa/Processor/Capture/CapturePhotoProcessor.swift @@ -11,11 +11,6 @@ struct CapturePhotoProcessor: AespaCapturePhotoOutputProcessing { let setting: AVCapturePhotoSettings let delegate: AVCapturePhotoCaptureDelegate - init(setting: AVCapturePhotoSettings, delegate: AVCapturePhotoCaptureDelegate) { - self.setting = setting - self.delegate = delegate - } - func process(_ output: T) throws where T: AespaPhotoOutputRepresentable { guard output.getConnection(with: .video) != nil else { throw AespaError.session(reason: .cannotFindConnection) diff --git a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift index e563ea6..48432f5 100644 --- a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift +++ 
b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift @@ -10,6 +10,14 @@ import SwiftUI import AVFoundation public extension AespaSession { + /// This function is used to create a preview of the session. + /// It returns a SwiftUI `View` that displays video as it is being captured. + /// + /// - Parameter gravity: Defines how the video is displayed within the layer bounds. + /// .resizeAspectFill` by default, which scales the video to fill the layer bounds. + /// - Parameter position: Determines the initial position of the camera (front or back). Default is .back. + /// + /// - Returns: A SwiftUI `View` that displays the video feed. func preview( gravity: AVLayerVideoGravity = .resizeAspectFill, startPosition position: AVCaptureDevice.Position = .back @@ -17,14 +25,17 @@ public extension AespaSession { return Preview(of: self, gravity: gravity) } - /// A `SwiftUI` `View` that you use to display video as it is being captured by an input device. + /// This function is used to create an interactive preview of the session. + /// It returns a SwiftUI `View` that not only displays video as it is being captured, + /// but also allows user interaction like tap-to-focus, pinch zoom and double tap position change. /// - /// - Parameter gravity: Define `AVLayerVideoGravity` for preview's orientation. - /// .resizeAspectFill` by default. + /// - Parameter gravity: Defines how the video is displayed within the layer bounds. + /// .resizeAspectFill` by default, which scales the video to fill the layer bounds. + /// - Parameter position: Determines the initial position of the camera (front or back). Default is .front. /// - /// - Returns: `some UIViewRepresentable` which can coordinate other `View` components + /// - Returns: A SwiftUI `View` that displays the video feed and allows user interaction. /// - /// - Warning: Tap-to-focus works only in `autoFocus` mode. Make sure you're using the mode. + /// - Warning: Tap-to-focus works only in `autoFocus` mode. 
Make sure you're using this mode for the feature to work. func interactivePreview( gravity: AVLayerVideoGravity = .resizeAspectFill, startPosition position: AVCaptureDevice.Position = .front @@ -34,6 +45,7 @@ public extension AespaSession { } } + public struct InteractivePreview: View { private let preview: Preview @State private var cameraPosition: AVCaptureDevice.Position From 85bfdc5d1ee85043a1eb84c53ec5d0a3c13357f9 Mon Sep 17 00:00:00 2001 From: Young Bin Lee Date: Tue, 27 Jun 2023 11:25:32 +0900 Subject: [PATCH 08/21] Change folder name. --- {DemoApp => Demo}/Aespa-iOS.xcodeproj/project.pbxproj | 0 .../project.xcworkspace/contents.xcworkspacedata | 0 .../project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist | 0 .../Aespa-iOS.xcodeproj/xcshareddata/xcschemes/Aespa-iOS.xcscheme | 0 {DemoApp => Demo}/Aespa-iOS/Aespa-iOS.entitlements | 0 {DemoApp => Demo}/Aespa-iOS/Aespa_iOSApp.swift | 0 .../Aespa-iOS/Assets.xcassets/AccentColor.colorset/Contents.json | 0 .../Aespa-iOS/Assets.xcassets/AppIcon.appiconset/Contents.json | 0 {DemoApp => Demo}/Aespa-iOS/Assets.xcassets/Contents.json | 0 {DemoApp => Demo}/Aespa-iOS/GalleryView.swift | 0 .../Preview Content/Preview Assets.xcassets/Contents.json | 0 {DemoApp => Demo}/Aespa-iOS/SettingView.swift | 0 {DemoApp => Demo}/Aespa-iOS/VideoContentView.swift | 0 {DemoApp => Demo}/Aespa-iOS/VideoContentViewModel.swift | 0 {DemoApp => Demo}/Package.swift | 0 15 files changed, 0 insertions(+), 0 deletions(-) rename {DemoApp => Demo}/Aespa-iOS.xcodeproj/project.pbxproj (100%) rename {DemoApp => Demo}/Aespa-iOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata (100%) rename {DemoApp => Demo}/Aespa-iOS.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist (100%) rename {DemoApp => Demo}/Aespa-iOS.xcodeproj/xcshareddata/xcschemes/Aespa-iOS.xcscheme (100%) rename {DemoApp => Demo}/Aespa-iOS/Aespa-iOS.entitlements (100%) rename {DemoApp => Demo}/Aespa-iOS/Aespa_iOSApp.swift (100%) rename {DemoApp => 
Demo}/Aespa-iOS/Assets.xcassets/AccentColor.colorset/Contents.json (100%) rename {DemoApp => Demo}/Aespa-iOS/Assets.xcassets/AppIcon.appiconset/Contents.json (100%) rename {DemoApp => Demo}/Aespa-iOS/Assets.xcassets/Contents.json (100%) rename {DemoApp => Demo}/Aespa-iOS/GalleryView.swift (100%) rename {DemoApp => Demo}/Aespa-iOS/Preview Content/Preview Assets.xcassets/Contents.json (100%) rename {DemoApp => Demo}/Aespa-iOS/SettingView.swift (100%) rename {DemoApp => Demo}/Aespa-iOS/VideoContentView.swift (100%) rename {DemoApp => Demo}/Aespa-iOS/VideoContentViewModel.swift (100%) rename {DemoApp => Demo}/Package.swift (100%) diff --git a/DemoApp/Aespa-iOS.xcodeproj/project.pbxproj b/Demo/Aespa-iOS.xcodeproj/project.pbxproj similarity index 100% rename from DemoApp/Aespa-iOS.xcodeproj/project.pbxproj rename to Demo/Aespa-iOS.xcodeproj/project.pbxproj diff --git a/DemoApp/Aespa-iOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/Demo/Aespa-iOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata similarity index 100% rename from DemoApp/Aespa-iOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata rename to Demo/Aespa-iOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata diff --git a/DemoApp/Aespa-iOS.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/Demo/Aespa-iOS.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist similarity index 100% rename from DemoApp/Aespa-iOS.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist rename to Demo/Aespa-iOS.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist diff --git a/DemoApp/Aespa-iOS.xcodeproj/xcshareddata/xcschemes/Aespa-iOS.xcscheme b/Demo/Aespa-iOS.xcodeproj/xcshareddata/xcschemes/Aespa-iOS.xcscheme similarity index 100% rename from DemoApp/Aespa-iOS.xcodeproj/xcshareddata/xcschemes/Aespa-iOS.xcscheme rename to Demo/Aespa-iOS.xcodeproj/xcshareddata/xcschemes/Aespa-iOS.xcscheme diff --git a/DemoApp/Aespa-iOS/Aespa-iOS.entitlements 
b/Demo/Aespa-iOS/Aespa-iOS.entitlements similarity index 100% rename from DemoApp/Aespa-iOS/Aespa-iOS.entitlements rename to Demo/Aespa-iOS/Aespa-iOS.entitlements diff --git a/DemoApp/Aespa-iOS/Aespa_iOSApp.swift b/Demo/Aespa-iOS/Aespa_iOSApp.swift similarity index 100% rename from DemoApp/Aespa-iOS/Aespa_iOSApp.swift rename to Demo/Aespa-iOS/Aespa_iOSApp.swift diff --git a/DemoApp/Aespa-iOS/Assets.xcassets/AccentColor.colorset/Contents.json b/Demo/Aespa-iOS/Assets.xcassets/AccentColor.colorset/Contents.json similarity index 100% rename from DemoApp/Aespa-iOS/Assets.xcassets/AccentColor.colorset/Contents.json rename to Demo/Aespa-iOS/Assets.xcassets/AccentColor.colorset/Contents.json diff --git a/DemoApp/Aespa-iOS/Assets.xcassets/AppIcon.appiconset/Contents.json b/Demo/Aespa-iOS/Assets.xcassets/AppIcon.appiconset/Contents.json similarity index 100% rename from DemoApp/Aespa-iOS/Assets.xcassets/AppIcon.appiconset/Contents.json rename to Demo/Aespa-iOS/Assets.xcassets/AppIcon.appiconset/Contents.json diff --git a/DemoApp/Aespa-iOS/Assets.xcassets/Contents.json b/Demo/Aespa-iOS/Assets.xcassets/Contents.json similarity index 100% rename from DemoApp/Aespa-iOS/Assets.xcassets/Contents.json rename to Demo/Aespa-iOS/Assets.xcassets/Contents.json diff --git a/DemoApp/Aespa-iOS/GalleryView.swift b/Demo/Aespa-iOS/GalleryView.swift similarity index 100% rename from DemoApp/Aespa-iOS/GalleryView.swift rename to Demo/Aespa-iOS/GalleryView.swift diff --git a/DemoApp/Aespa-iOS/Preview Content/Preview Assets.xcassets/Contents.json b/Demo/Aespa-iOS/Preview Content/Preview Assets.xcassets/Contents.json similarity index 100% rename from DemoApp/Aespa-iOS/Preview Content/Preview Assets.xcassets/Contents.json rename to Demo/Aespa-iOS/Preview Content/Preview Assets.xcassets/Contents.json diff --git a/DemoApp/Aespa-iOS/SettingView.swift b/Demo/Aespa-iOS/SettingView.swift similarity index 100% rename from DemoApp/Aespa-iOS/SettingView.swift rename to Demo/Aespa-iOS/SettingView.swift diff 
--git a/DemoApp/Aespa-iOS/VideoContentView.swift b/Demo/Aespa-iOS/VideoContentView.swift similarity index 100% rename from DemoApp/Aespa-iOS/VideoContentView.swift rename to Demo/Aespa-iOS/VideoContentView.swift diff --git a/DemoApp/Aespa-iOS/VideoContentViewModel.swift b/Demo/Aespa-iOS/VideoContentViewModel.swift similarity index 100% rename from DemoApp/Aespa-iOS/VideoContentViewModel.swift rename to Demo/Aespa-iOS/VideoContentViewModel.swift diff --git a/DemoApp/Package.swift b/Demo/Package.swift similarity index 100% rename from DemoApp/Package.swift rename to Demo/Package.swift From c15fa131feb2795614f78d7aa48195e9ba522923 Mon Sep 17 00:00:00 2001 From: Young Bin Lee Date: Tue, 27 Jun 2023 13:06:32 +0900 Subject: [PATCH 09/21] Update readme --- README.md | 48 ++++++++++++++++++++++++++++++++---------------- 1 file changed, 32 insertions(+), 16 deletions(-) diff --git a/README.md b/README.md index 129f671..84ae22f 100644 --- a/README.md +++ b/README.md @@ -47,17 +47,12 @@ try await Aespa.configure() ## Introduction Aespa is a robust and intuitive Swift package for video capturing, built with a focus on the ease of setting up and usage. +It is designed to be easy to use for both beginners and experienced developers. If you're new to video recording on iOS or if you're looking to simplify your existing recording setup, Aespa could be the perfect fit for your project. -**This package provides a high-level API over Apple's `AVFoundation` framework**: abstracting away its complexity and making it straightforward for developers to implement video capturing functionalities in their iOS applications. - -**This package provides a clean, user-friendly API for common video recording tasks**: including starting and stopping recording, managing audio settings, adjusting video quality, setting camera position, etc. - - -## Features -Aespa is designed to be easy to use for both beginners and experienced developers. 
If you're new to video recording on iOS or if you're looking to simplify your existing recording setup, Aespa could be the perfect fit for your project. +### ✅ Super easy to use
- ✅ Super easy to use + Zip the boring configuration routine *Before* ``` mermaid @@ -69,6 +64,7 @@ AS -- "Connect" --> AIA["AVCaptureAudioInput"] AS -- "Add" --> FO["AVCaptureFileOutput"] FO --> PHCollectionListChangeRequest ``` + **Aespa** ``` mermaid graph LR @@ -80,7 +76,7 @@ graph LR
- ✅ Offer essential preset configuration & customization + Offer essential preset configuration & customization ``` mermaid graph TD @@ -96,7 +92,16 @@ AS --> D["Fetching asset files"]
- ✅ Combine & async support + Comprehensive error handling + +- The package provides comprehensive error handling, allowing you to build robust applications with minimal effort. + +
+ +### ✅ No more delegate +
+ + Combine & async support ``` mermaid graph LR; @@ -110,12 +115,11 @@ graph LR;
-
- ✅ Comprehensive error handling +### ✅ Also... +- Seamless image and video capture within a single preview session. +- Automated system permission management. +- Support SPM. -- The package provides comprehensive error handling, allowing you to build robust applications with minimal effort. - -
## Functionality @@ -123,6 +127,17 @@ graph LR; > > You can access our **official documentation** for the most comprehensive and up-to-date explanations in [here](https://enebin.github.io/Aespa/documentation/aespa/) +### Interactive Preview +One of our main features, `InteractivePreview` provides a comprehensive and intuitive way for users to interact directly with the camera preview. + +| Features | Description | +|------------------------|------------------------------------------------------------------------------------------------------------------| +| Tap-to-focus | Adjusts the focus of the camera based on the tapped area on the screen. | +| Double tap camera change | Switches between the front and back camera upon double tapping. | +| Pinch zoom | Allows zooming in or out on the preview by using a pinch gesture. | + + +### More manual options | Common | Description | |----------------------------------|------------------------------------------------------------------------------------------------------------------| | ✨ `zoom` | Modifies the zoom factor. | @@ -154,6 +169,7 @@ graph LR; | ✨ `fetchPhotoFiles` | Fetches a list of captured photos files. | | `photoFilePublisher` | Emits a `Result` object containing a latest image file data. | + ## Installation ### Swift Package Manager (SPM) Follow these steps to install **Aespa** using SPM: @@ -166,7 +182,7 @@ https://github.com/enebin/Aespa.git 3. For the `Version rule`, select `Up to Next Minor` and specify the current Aespa version then click `Next`. 4. On the final screen, select the `Aespa` library and then click `Finish`. -**Aespa** should now be integrated into your project 🚀 +**Aespa** should now be integrated into your project 🚀. 
## Usage From b0f8660881d00fab4a3a8b7a67aad95e9f25d99f Mon Sep 17 00:00:00 2001 From: Young Bin Lee Date: Tue, 27 Jun 2023 17:24:59 +0900 Subject: [PATCH 10/21] Add focusing ui foundation --- Sources/Aespa/Core/Context/Context.swift | 3 +- .../Util/Extension/SwiftUI+Extension.swift | 144 ++++++++++++++---- 2 files changed, 120 insertions(+), 27 deletions(-) diff --git a/Sources/Aespa/Core/Context/Context.swift b/Sources/Aespa/Core/Context/Context.swift index c424de5..6a2acfe 100644 --- a/Sources/Aespa/Core/Context/Context.swift +++ b/Sources/Aespa/Core/Context/Context.swift @@ -462,8 +462,9 @@ extension VideoContext { /// current photo settings, controlling flash mode, and red-eye reduction, capturing /// photo, and fetching captured photo files. public protocol PhotoContext { + /// associatedtype PhotoContextType: PhotoContext - + /// var underlyingPhotoContext: PhotoContextType { get } /// The publisher that broadcasts the result of a photo file operation. diff --git a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift index 48432f5..2786306 100644 --- a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift +++ b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift @@ -35,7 +35,8 @@ public extension AespaSession { /// /// - Returns: A SwiftUI `View` that displays the video feed and allows user interaction. /// - /// - Warning: Tap-to-focus works only in `autoFocus` mode. Make sure you're using this mode for the feature to work. + /// - Warning: Tap-to-focus works only in `autoFocus` mode. + /// Make sure you're using this mode for the feature to work. 
func interactivePreview( gravity: AVLayerVideoGravity = .resizeAspectFill, startPosition position: AVCaptureDevice.Position = .front @@ -45,13 +46,24 @@ public extension AespaSession { } } - public struct InteractivePreview: View { private let preview: Preview + private let animationQueue: OperationQueue + // Position + @State private var enableChangePosition = true @State private var cameraPosition: AVCaptureDevice.Position + // Zoom + @State private var enableZoom = true @State private var previousZoomFactor: CGFloat = 1.0 @State private var currentZoomFactor: CGFloat = 1.0 + + // Foocus + @State private var enableFocus = true + @State private var enableShowingCrosshair = true + @State private var tappedLocation = CGPoint.zero + @State private var focusFrameOpacity: Double = 0 + @State private var focusingTask: Task? init( _ preview: Preview, @@ -59,6 +71,8 @@ public struct InteractivePreview: View { ) { self.preview = preview self.cameraPosition = startPosition + self.animationQueue = OperationQueue() + animationQueue.maxConcurrentOperationCount = 1 } var session: AespaSession { @@ -78,33 +92,111 @@ public struct InteractivePreview: View { } public var body: some View { - let maxZoomFactor = session.maxZoomFactor ?? 1.0 + ZStack { + preview + .gesture(changePositionGesture) + .gesture(tapToFocusGesture) + .gesture(pinchZoomGesture) + + // Crosshair + Rectangle() + .stroke(lineWidth: 1) + .foregroundColor(Color.yellow) + .frame(width: 100, height: 100) + .position(tappedLocation) + .opacity(focusFrameOpacity) + .animation(.spring(), value: focusFrameOpacity) + } + } +} - preview - .onTapGesture(count: 2) { - let nextPosition: AVCaptureDevice.Position = (currentCameraPosition == .back) ? 
.front : .back - session.setPosition(to: nextPosition) - cameraPosition = nextPosition +private extension InteractivePreview { + var changePositionGesture: some Gesture { + guard enableChangePosition else { + return TapGesture(count: 2).onEnded{} + } + + return TapGesture(count: 2).onEnded { + let nextPosition: AVCaptureDevice.Position = (currentCameraPosition == .back) ? .front : .back + session.setPosition(to: nextPosition) + + cameraPosition = nextPosition + } + } + + var tapToFocusGesture: some Gesture { + guard enableFocus else { + return DragGesture(minimumDistance: 0).onEnded{ _ in } + } + + return DragGesture(minimumDistance: 0) + .onEnded { value in + guard enableFocus else { return } + + // if currentFocusMode == .autoFocus { + session.setFocus(mode: .autoFocus, point: value.location) + tappedLocation = value.location + + showCrosshair() + // } } - .gesture(DragGesture(minimumDistance: 0) - .onEnded { value in - if currentFocusMode == .autoFocus { - session.setFocus(mode: .autoFocus, point: value.location) - } - }) - .gesture(MagnificationGesture() - .onChanged { (scale) in - let videoZoomFactor = scale * previousZoomFactor - if (videoZoomFactor <= maxZoomFactor) { - let newZoomFactor = max(1.0, min(videoZoomFactor, maxZoomFactor)) - session.zoom(factor: newZoomFactor) - } - } - .onEnded { (scale) in - let videoZoomFactor = scale * previousZoomFactor - previousZoomFactor = videoZoomFactor >= 1 ? videoZoomFactor : 1 + } + + var pinchZoomGesture: some Gesture { + guard enableZoom else { + return MagnificationGesture().onChanged { _ in } .onEnded { _ in } + } + + let maxZoomFactor = session.maxZoomFactor ?? 
1.0 + return MagnificationGesture() + .onChanged { (scale) in + let videoZoomFactor = scale * previousZoomFactor + if (videoZoomFactor <= maxZoomFactor) { + let newZoomFactor = max(1.0, min(videoZoomFactor, maxZoomFactor)) + session.zoom(factor: newZoomFactor) } - ) + } + .onEnded { (scale) in + let videoZoomFactor = scale * previousZoomFactor + previousZoomFactor = videoZoomFactor >= 1 ? videoZoomFactor : 1 + } + } + + func showCrosshair() { + guard enableShowingCrosshair else { return } + + // Cancel the previous task + focusingTask?.cancel() + + focusingTask = Task { + withAnimation { focusFrameOpacity = 1 } + + // Sleep for 2 seconds + try await Task.sleep(nanoseconds: 2 * 1_000_000_000) + withAnimation { focusFrameOpacity = 0.35 } + + // Sleep for 3 more seconds + try await Task.sleep(nanoseconds: 3 * 1_000_000_000) + withAnimation { focusFrameOpacity = 0 } + } + } +} + +public extension InteractivePreview { + func crosshair(enabled: Bool) { + enableShowingCrosshair = enabled + } + + func tapToFocus(enabled: Bool) { + enableFocus = enabled + } + + func doubleTapToChangeCameraPosition(enabled: Bool) { + enableChangePosition = enabled + } + + func pinchZoom(enabled: Bool) { + enableZoom = enabled } } From 836f5300112971f755196d10a0b591201990ae9c Mon Sep 17 00:00:00 2001 From: Young Bin Lee Date: Tue, 27 Jun 2023 18:35:29 +0900 Subject: [PATCH 11/21] Fix failing tests --- Tests/TestHostApp.xcodeproj/project.pbxproj | 14 +++++++------- Tests/Tests/Tuner/DeviceTunerTests.swift | 9 +++++---- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/Tests/TestHostApp.xcodeproj/project.pbxproj b/Tests/TestHostApp.xcodeproj/project.pbxproj index 5d4a45f..99f9dd1 100644 --- a/Tests/TestHostApp.xcodeproj/project.pbxproj +++ b/Tests/TestHostApp.xcodeproj/project.pbxproj @@ -61,7 +61,7 @@ 9C727D0B2A3FEF9800EF9472 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 
9C727D0E2A3FEF9800EF9472 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; 9C727D142A3FEF9900EF9472 /* TestHostAppTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = TestHostAppTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; - 9C727D542A3FF09400EF9472 /* Aespa */ = {isa = PBXFileReference; lastKnownFileType = wrapper; name = Aespa; path = ..; sourceTree = ""; }; + 9CA8C9BC2A4AE41300548463 /* Aespa */ = {isa = PBXFileReference; lastKnownFileType = wrapper; name = Aespa; path = ..; sourceTree = ""; }; 9CD12FF92A452FA10012D1E1 /* URLCacheStorageTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = URLCacheStorageTests.swift; sourceTree = ""; }; 9CD12FFB2A454AC40012D1E1 /* GeneratorTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GeneratorTests.swift; sourceTree = ""; }; 9CD12FFD2A454B770012D1E1 /* MockImage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockImage.swift; sourceTree = ""; }; @@ -153,8 +153,8 @@ 9C727CFB2A3FEF9600EF9472 = { isa = PBXGroup; children = ( + 9CA8C9BB2A4AE41300548463 /* Packages */, 9C4BBE4A2A3FF4870071C84F /* Test.xctestplan */, - 9C727D532A3FF09400EF9472 /* Packages */, 9C727D062A3FEF9600EF9472 /* TestHostApp */, 9C4BBE4B2A400E450071C84F /* Tests */, 9C727D052A3FEF9600EF9472 /* Products */, @@ -190,19 +190,19 @@ path = "Preview Content"; sourceTree = ""; }; - 9C727D532A3FF09400EF9472 /* Packages */ = { + 9C727D552A3FF0B100EF9472 /* Frameworks */ = { isa = PBXGroup; children = ( - 9C727D542A3FF09400EF9472 /* Aespa */, ); - name = Packages; + name = Frameworks; sourceTree = ""; }; - 9C727D552A3FF0B100EF9472 /* Frameworks */ = { + 9CA8C9BB2A4AE41300548463 /* Packages */ = { isa = PBXGroup; children = ( + 9CA8C9BC2A4AE41300548463 /* Aespa */, ); - name = Frameworks; + name = Packages; 
sourceTree = ""; }; 9CF0FE2B2A40573000FEE8C9 /* Data */ = { diff --git a/Tests/Tests/Tuner/DeviceTunerTests.swift b/Tests/Tests/Tuner/DeviceTunerTests.swift index d5f232c..a01dd74 100644 --- a/Tests/Tests/Tuner/DeviceTunerTests.swift +++ b/Tests/Tests/Tuner/DeviceTunerTests.swift @@ -25,18 +25,19 @@ final class DeviceTunerTests: XCTestCase { func testAutoFocusTuner() throws { let mode = AVCaptureDevice.FocusMode.locked - let tuner = AutoFocusTuner(mode: mode) + let point = CGPoint() + let tuner = AutoFocusTuner(mode: mode, point: point) stub(device) { proxy in when(proxy.isFocusModeSupported(equal(to: mode))).thenReturn(true) - when(proxy.setFocusMode(equal(to: mode))).then { mode in - when(proxy.focusMode.get).thenReturn(mode) + when(proxy.setFocusMode(equal(to: mode), point: equal(to: point))).then { mode in + when(proxy.focusMode.get).thenReturn(.locked) } } try tuner.tune(device) verify(device) - .setFocusMode(equal(to: mode)) + .setFocusMode(equal(to: mode), point: equal(to: point)) .with(returnType: Void.self) XCTAssertEqual(device.focusMode, mode) From 8bb2963d3f0e1485f15881749509e6ceb62b0347 Mon Sep 17 00:00:00 2001 From: Young Bin Lee Date: Tue, 27 Jun 2023 19:36:15 +0900 Subject: [PATCH 12/21] Fix focusing point logic --- Demo/Aespa-iOS/VideoContentView.swift | 2 + Demo/Aespa-iOS/VideoContentViewModel.swift | 4 +- .../Aespa/Tuner/Device/AutoFocusTuner.swift | 13 ++++++- .../Util/Extension/SwiftUI+Extension.swift | 38 +++++++++++-------- 4 files changed, 38 insertions(+), 19 deletions(-) diff --git a/Demo/Aespa-iOS/VideoContentView.swift b/Demo/Aespa-iOS/VideoContentView.swift index 87d0671..397d084 100644 --- a/Demo/Aespa-iOS/VideoContentView.swift +++ b/Demo/Aespa-iOS/VideoContentView.swift @@ -22,6 +22,8 @@ struct VideoContentView: View { var body: some View { ZStack { viewModel.preview + .crosshair(enabled: false) + .pinchZoom(enabled: true) .frame(minWidth: 0, maxWidth: .infinity, minHeight: 0, diff --git a/Demo/Aespa-iOS/VideoContentViewModel.swift 
b/Demo/Aespa-iOS/VideoContentViewModel.swift index bc3fc6c..f4642bd 100644 --- a/Demo/Aespa-iOS/VideoContentViewModel.swift +++ b/Demo/Aespa-iOS/VideoContentViewModel.swift @@ -14,7 +14,7 @@ import Aespa class VideoContentViewModel: ObservableObject { let aespaSession: AespaSession - var preview: some View { + var preview: InteractivePreview { aespaSession.interactivePreview() } @@ -36,7 +36,7 @@ class VideoContentViewModel: ObservableObject { // Common setting aespaSession - .setFocus(mode: .continuousAutoFocus) + .setFocus(mode: .autoFocus) .setOrientation(to: .portrait) .setQuality(to: .high) .custom(WideColorCameraTuner()) diff --git a/Sources/Aespa/Tuner/Device/AutoFocusTuner.swift b/Sources/Aespa/Tuner/Device/AutoFocusTuner.swift index dbce4b3..d5f8694 100644 --- a/Sources/Aespa/Tuner/Device/AutoFocusTuner.swift +++ b/Sources/Aespa/Tuner/Device/AutoFocusTuner.swift @@ -5,19 +5,28 @@ // Created by Young Bin on 2023/06/10. // +import UIKit import Foundation import AVFoundation struct AutoFocusTuner: AespaDeviceTuning { let needLock = true let mode: AVCaptureDevice.FocusMode - let point: CGPoint? + let point: CGPoint? 
// Should be passed as original CGPoint, not mapped func tune(_ device: T) throws { guard device.isFocusModeSupported(mode) else { throw AespaError.device(reason: .unsupported) } + + var parsedPoint = point + if let point { + parsedPoint = CGPoint( + x: point.x / UIScreen.main.bounds.width, + y: point.y / UIScreen.main.bounds.height + ) + } - device.setFocusMode(mode, point: point) + device.setFocusMode(mode, point: parsedPoint) } } diff --git a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift index 2786306..6bfa723 100644 --- a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift +++ b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift @@ -40,7 +40,7 @@ public extension AespaSession { func interactivePreview( gravity: AVLayerVideoGravity = .resizeAspectFill, startPosition position: AVCaptureDevice.Position = .front - ) -> some View { + ) -> InteractivePreview { let internalPreview = Preview(of: self, gravity: gravity) return InteractivePreview(internalPreview, startPosition: position) } @@ -64,7 +64,7 @@ public struct InteractivePreview: View { @State private var tappedLocation = CGPoint.zero @State private var focusFrameOpacity: Double = 0 @State private var focusingTask: Task? 
- + init( _ preview: Preview, startPosition: AVCaptureDevice.Position @@ -133,12 +133,16 @@ private extension InteractivePreview { .onEnded { value in guard enableFocus else { return } - // if currentFocusMode == .autoFocus { +// guard currentFocusMode == .autoFocus || currentFocusMode == .continuousAutoFocus else { +// return +// } + session.setFocus(mode: .autoFocus, point: value.location) tappedLocation = value.location - showCrosshair() - // } + if enableShowingCrosshair { + showCrosshair() + } } } @@ -183,20 +187,24 @@ private extension InteractivePreview { } public extension InteractivePreview { - func crosshair(enabled: Bool) { + func crosshair(enabled: Bool) -> Self { enableShowingCrosshair = enabled + return self } - func tapToFocus(enabled: Bool) { + func tapToFocus(enabled: Bool) -> Self { enableFocus = enabled + return self } - func doubleTapToChangeCameraPosition(enabled: Bool) { + func doubleTapToChangeCameraPosition(enabled: Bool) -> Self { enableChangePosition = enabled + return self } - func pinchZoom(enabled: Bool) { + func pinchZoom(enabled: Bool) -> Self { enableZoom = enabled + return self } } @@ -204,7 +212,7 @@ struct Preview: UIViewControllerRepresentable { let session: AespaSession let gravity: AVLayerVideoGravity let previewLayer: AVCaptureVideoPreviewLayer - + init( of session: AespaSession, gravity: AVLayerVideoGravity @@ -213,21 +221,21 @@ struct Preview: UIViewControllerRepresentable { self.session = session self.previewLayer = session.previewLayer } - + func makeUIViewController(context: Context) -> UIViewController { let viewController = UIViewController() viewController.view.backgroundColor = .clear - + return viewController } - + func updateUIViewController(_ uiViewController: UIViewController, context: Context) { previewLayer.videoGravity = gravity uiViewController.view.layer.addSublayer(previewLayer) - + previewLayer.frame = uiViewController.view.bounds } - + func dismantleUIViewController(_ uiViewController: UIViewController, 
coordinator: ()) { previewLayer.removeFromSuperlayer() } From f4a428c9b912836d395b8fde51c7ceb9eb50db00 Mon Sep 17 00:00:00 2001 From: enebin Date: Wed, 28 Jun 2023 09:40:20 +0900 Subject: [PATCH 13/21] Add movement tracking --- Sources/Aespa/AespaSession.swift | 2 +- .../AVCaptureDevice+AespaRepresentable.swift | 5 +++-- ...{AutoFocusTuner.swift => FocusTuner.swift} | 10 ++++++--- .../Util/Extension/SwiftUI+Extension.swift | 22 ++++++++++++------- 4 files changed, 25 insertions(+), 14 deletions(-) rename Sources/Aespa/Tuner/Device/{AutoFocusTuner.swift => FocusTuner.swift} (76%) diff --git a/Sources/Aespa/AespaSession.swift b/Sources/Aespa/AespaSession.swift index ab84894..ec7e930 100644 --- a/Sources/Aespa/AespaSession.swift +++ b/Sources/Aespa/AespaSession.swift @@ -206,7 +206,7 @@ extension AespaSession: CommonContext { @discardableResult public func setFocusWithError(mode: AVCaptureDevice.FocusMode, point: CGPoint? = nil) throws -> AespaSession { - let tuner = AutoFocusTuner(mode: mode, point: point) + let tuner = FocusTuner(mode: mode, point: point) try coreSession.run(tuner) return self } diff --git a/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift b/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift index cec5715..903579f 100644 --- a/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift +++ b/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift @@ -8,14 +8,15 @@ import Foundation import AVFoundation -protocol AespaCaptureDeviceRepresentable { +protocol AespaCaptureDeviceRepresentable: NSObject { var hasTorch: Bool { get } var focusMode: AVCaptureDevice.FocusMode { get set } + var isSubjectAreaChangeMonitoringEnabled: Bool { get set } var flashMode: AVCaptureDevice.FlashMode { get set } var videoZoomFactor: CGFloat { get set } var maxResolution: Double? 
{ get } - + func isFocusModeSupported(_ focusMode: AVCaptureDevice.FocusMode) -> Bool func setZoomFactor(_ factor: CGFloat) diff --git a/Sources/Aespa/Tuner/Device/AutoFocusTuner.swift b/Sources/Aespa/Tuner/Device/FocusTuner.swift similarity index 76% rename from Sources/Aespa/Tuner/Device/AutoFocusTuner.swift rename to Sources/Aespa/Tuner/Device/FocusTuner.swift index d5f8694..8d4f46f 100644 --- a/Sources/Aespa/Tuner/Device/AutoFocusTuner.swift +++ b/Sources/Aespa/Tuner/Device/FocusTuner.swift @@ -1,5 +1,5 @@ // -// AutoFocusTuner.swift +// FocusTuner.swift // // // Created by Young Bin on 2023/06/10. @@ -9,16 +9,19 @@ import UIKit import Foundation import AVFoundation -struct AutoFocusTuner: AespaDeviceTuning { +struct FocusTuner: AespaDeviceTuning { let needLock = true + let mode: AVCaptureDevice.FocusMode + let isSubjectAreaChangeMonitoringEnabled: Bool + let point: CGPoint? // Should be passed as original CGPoint, not mapped func tune(_ device: T) throws { guard device.isFocusModeSupported(mode) else { throw AespaError.device(reason: .unsupported) } - + var parsedPoint = point if let point { parsedPoint = CGPoint( @@ -27,6 +30,7 @@ struct AutoFocusTuner: AespaDeviceTuning { ) } + device.isSubjectAreaChangeMonitoringEnabled = isSubjectAreaChangeMonitoringEnabled device.setFocusMode(mode, point: parsedPoint) } } diff --git a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift index 6bfa723..8c7b780 100644 --- a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift +++ b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift @@ -10,7 +10,7 @@ import SwiftUI import AVFoundation public extension AespaSession { - /// This function is used to create a preview of the session. + /// This function is used to create a preview of the session. Doesn't offer any functionalities. /// It returns a SwiftUI `View` that displays video as it is being captured. 
/// /// - Parameter gravity: Defines how the video is displayed within the layer bounds. @@ -20,7 +20,8 @@ public extension AespaSession { /// - Returns: A SwiftUI `View` that displays the video feed. func preview( gravity: AVLayerVideoGravity = .resizeAspectFill, - startPosition position: AVCaptureDevice.Position = .back + startPosition position: AVCaptureDevice.Position = .back, + preferredFocusMode focusMode: AVCaptureDevice.FocusMode = .continuousAutoFocus ) -> some View { return Preview(of: self, gravity: gravity) } @@ -39,16 +40,20 @@ public extension AespaSession { /// Make sure you're using this mode for the feature to work. func interactivePreview( gravity: AVLayerVideoGravity = .resizeAspectFill, - startPosition position: AVCaptureDevice.Position = .front + startPosition position: AVCaptureDevice.Position = .back, + preferredFocusMode focusMode: AVCaptureDevice.FocusMode = .continuousAutoFocus ) -> InteractivePreview { let internalPreview = Preview(of: self, gravity: gravity) - return InteractivePreview(internalPreview, startPosition: position) + return InteractivePreview( + internalPreview, + startPosition: position, + preferredFocusMode: focusMode) } } public struct InteractivePreview: View { private let preview: Preview - private let animationQueue: OperationQueue + // Position @State private var enableChangePosition = true @State private var cameraPosition: AVCaptureDevice.Position @@ -59,6 +64,7 @@ public struct InteractivePreview: View { @State private var currentZoomFactor: CGFloat = 1.0 // Foocus + @State private var preferredFocusMode: AVCaptureDevice.FocusMode @State private var enableFocus = true @State private var enableShowingCrosshair = true @State private var tappedLocation = CGPoint.zero @@ -67,12 +73,12 @@ public struct InteractivePreview: View { init( _ preview: Preview, - startPosition: AVCaptureDevice.Position + startPosition: AVCaptureDevice.Position, + preferredFocusMode focusMode: AVCaptureDevice.FocusMode ) { self.preview = preview 
self.cameraPosition = startPosition - self.animationQueue = OperationQueue() - animationQueue.maxConcurrentOperationCount = 1 + self.preferredFocusMode = focusMode } var session: AespaSession { From 8f5fb8d84ee36823f64aed1659e562cb204ba9ca Mon Sep 17 00:00:00 2001 From: Young Bin Lee Date: Wed, 28 Jun 2023 13:48:37 +0900 Subject: [PATCH 14/21] Add change monitoring --- Demo/Aespa-iOS/VideoContentViewModel.swift | 2 + Sources/Aespa/AespaSession.swift | 37 ++++++- Sources/Aespa/Core/Context/Context.swift | 19 ++++ .../Tuner/Device/ChangeMonitoringTuner.swift | 23 +++++ Sources/Aespa/Tuner/Device/FocusTuner.swift | 3 - .../Util/Extension/SwiftUI+Extension.swift | 97 ++++++++++--------- 6 files changed, 128 insertions(+), 53 deletions(-) create mode 100644 Sources/Aespa/Tuner/Device/ChangeMonitoringTuner.swift diff --git a/Demo/Aespa-iOS/VideoContentViewModel.swift b/Demo/Aespa-iOS/VideoContentViewModel.swift index f4642bd..71bc98d 100644 --- a/Demo/Aespa-iOS/VideoContentViewModel.swift +++ b/Demo/Aespa-iOS/VideoContentViewModel.swift @@ -34,9 +34,11 @@ class VideoContentViewModel: ObservableObject { do { try await Aespa.configure() + // MARK: Settings should be done after `configure` // Common setting aespaSession .setFocus(mode: .autoFocus) + .setChangeMonitoring(enabled: true) .setOrientation(to: .portrait) .setQuality(to: .high) .custom(WideColorCameraTuner()) diff --git a/Sources/Aespa/AespaSession.swift b/Sources/Aespa/AespaSession.swift index ec7e930..57927d1 100644 --- a/Sources/Aespa/AespaSession.swift +++ b/Sources/Aespa/AespaSession.swift @@ -20,8 +20,8 @@ import AVFoundation /// /// It also includes functionalities to fetch video files. 
open class AespaSession { - private let option: AespaOption - private let coreSession: AespaCoreSession + let option: AespaOption + let coreSession: AespaCoreSession private let fileManager: AespaCoreFileManager private let albumManager: AespaCoreAlbumManager @@ -102,6 +102,10 @@ open class AespaSession { public var avCaptureSession: AVCaptureSession { coreSession } + + public var isRunning: Bool { + coreSession.isRunning + } /// This property provides the maximum zoom factor supported by the active video device format. public var maxZoomFactor: CGFloat? { @@ -133,6 +137,11 @@ open class AespaSession { return device.position } + public var isSubjectAreaChangeMonitoringEnabled: Bool? { + guard let device = coreSession.videoDeviceInput?.device else { return nil } + return device.isSubjectAreaChangeMonitoringEnabled + } + /// This publisher is responsible for emitting updates to the preview layer. /// /// A log message is printed to the console every time a new layer is pushed. @@ -144,8 +153,23 @@ open class AespaSession { .compactMap { $0 } .eraseToAnyPublisher() } - + // MARK: - Utilities + + public func getSubjectAreaDidChangePublisher() -> AnyPublisher { + if isSubjectAreaChangeMonitoringEnabled != true { + Logger.log( + message: """ + `isSubjectAreaChangeMonitoringEnabled` is not set `true`. + `AVCaptureDeviceSubjectAreaDidChange` publisher may not publish anything. + """) + } + + return NotificationCenter.default + .publisher(for: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange) + .eraseToAnyPublisher() + } + /// Checks if essential conditions to start recording are satisfied. /// This includes checking for capture authorization, if the session is running, /// if there is an existing connection and if a device is attached. 
@@ -218,6 +242,13 @@ extension AespaSession: CommonContext { return self } + @discardableResult + public func setChangeMonitoringWithError(enabled: Bool) throws -> AespaSession { + let tuner = ChangeMonitoringTuner(isSubjectAreaChangeMonitoringEnabled: enabled) + try coreSession.run(tuner) + return self + } + public func customizeWithError(_ tuner: T) throws -> AespaSession { try coreSession.run(tuner) return self diff --git a/Sources/Aespa/Core/Context/Context.swift b/Sources/Aespa/Core/Context/Context.swift index 6a2acfe..02f488a 100644 --- a/Sources/Aespa/Core/Context/Context.swift +++ b/Sources/Aespa/Core/Context/Context.swift @@ -78,6 +78,8 @@ public protocol CommonContext { /// - Returns: `AespaVideoContext`, for chaining calls. @discardableResult func zoomWithError(factor: CGFloat) throws -> CommonContextType + @discardableResult func setChangeMonitoringWithError(enabled: Bool) throws -> CommonContextType + /// This function provides a way to use a custom tuner to modify the current session. /// The tuner must conform to `AespaSessionTuning`. /// @@ -207,6 +209,23 @@ extension CommonContext { return underlyingCommonContext } + @discardableResult + public func setChangeMonitoring( + enabled: Bool, + errorHandler: ErrorHandler? = nil + ) -> CommonContextType { + do { + return try self.setChangeMonitoringWithError(enabled: enabled) + } catch let error { + errorHandler?(error) + Logger.log(error: error) // Logs any errors encountered during the operation + } + + return underlyingCommonContext + } + + + @discardableResult public func custom( _ tuner: T, diff --git a/Sources/Aespa/Tuner/Device/ChangeMonitoringTuner.swift b/Sources/Aespa/Tuner/Device/ChangeMonitoringTuner.swift new file mode 100644 index 0000000..74c8e28 --- /dev/null +++ b/Sources/Aespa/Tuner/Device/ChangeMonitoringTuner.swift @@ -0,0 +1,23 @@ +// +// ChangeMonitoringTuner.swift +// +// +// Created by 이영빈 on 2023/06/28. 
+// + +import Foundation +import AVFoundation + +struct ChangeMonitoringTuner: AespaDeviceTuning { + let needLock = true + + let enabled: Bool + + init(isSubjectAreaChangeMonitoringEnabled: Bool) { + self.enabled = isSubjectAreaChangeMonitoringEnabled + } + + func tune(_ device: T) throws { + device.isSubjectAreaChangeMonitoringEnabled = enabled + } +} diff --git a/Sources/Aespa/Tuner/Device/FocusTuner.swift b/Sources/Aespa/Tuner/Device/FocusTuner.swift index 8d4f46f..4268f95 100644 --- a/Sources/Aespa/Tuner/Device/FocusTuner.swift +++ b/Sources/Aespa/Tuner/Device/FocusTuner.swift @@ -13,8 +13,6 @@ struct FocusTuner: AespaDeviceTuning { let needLock = true let mode: AVCaptureDevice.FocusMode - let isSubjectAreaChangeMonitoringEnabled: Bool - let point: CGPoint? // Should be passed as original CGPoint, not mapped func tune(_ device: T) throws { @@ -30,7 +28,6 @@ struct FocusTuner: AespaDeviceTuning { ) } - device.isSubjectAreaChangeMonitoringEnabled = isSubjectAreaChangeMonitoringEnabled device.setFocusMode(mode, point: parsedPoint) } } diff --git a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift index 8c7b780..ae23a8f 100644 --- a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift +++ b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift @@ -15,14 +15,9 @@ public extension AespaSession { /// /// - Parameter gravity: Defines how the video is displayed within the layer bounds. /// .resizeAspectFill` by default, which scales the video to fill the layer bounds. - /// - Parameter position: Determines the initial position of the camera (front or back). Default is .back. /// /// - Returns: A SwiftUI `View` that displays the video feed. 
- func preview( - gravity: AVLayerVideoGravity = .resizeAspectFill, - startPosition position: AVCaptureDevice.Position = .back, - preferredFocusMode focusMode: AVCaptureDevice.FocusMode = .continuousAutoFocus - ) -> some View { + func preview(gravity: AVLayerVideoGravity = .resizeAspectFill) -> some View { return Preview(of: self, gravity: gravity) } @@ -32,7 +27,6 @@ /// /// - Parameter gravity: Defines how the video is displayed within the layer bounds. /// .resizeAspectFill` by default, which scales the video to fill the layer bounds. - /// - Parameter position: Determines the initial position of the camera (front or back). Default is .front. /// /// - Returns: A SwiftUI `View` that displays the video feed and allows user interaction. /// @@ -40,14 +34,10 @@ /// Make sure you're using this mode for the feature to work. func interactivePreview( gravity: AVLayerVideoGravity = .resizeAspectFill, - startPosition position: AVCaptureDevice.Position = .back, - preferredFocusMode focusMode: AVCaptureDevice.FocusMode = .continuousAutoFocus + preferredFocusMode focusMode: AVCaptureDevice.FocusMode = .continuousAutoFocus ) -> InteractivePreview { let internalPreview = Preview(of: self, gravity: gravity) - return InteractivePreview( - internalPreview, - startPosition: position, - preferredFocusMode: focusMode) + return InteractivePreview(internalPreview, preferredFocusMode: focusMode) } } @@ -56,7 +46,6 @@ public struct InteractivePreview: View { // Position @State private var enableChangePosition = true - @State private var cameraPosition: AVCaptureDevice.Position // Zoom @State private var enableZoom = true @@ -64,21 +53,25 @@ @State private var currentZoomFactor: CGFloat = 1.0 // Foocus - @State private var preferredFocusMode: AVCaptureDevice.FocusMode + @State private var preferredFocusMode: AVCaptureDevice.FocusMode = .continuousAutoFocus @State private var 
enableFocus = true + @State private var focusingLocation = CGPoint.zero + // Crosshair @State private var enableShowingCrosshair = true - @State private var tappedLocation = CGPoint.zero @State private var focusFrameOpacity: Double = 0 - @State private var focusingTask: Task? + @State private var showingCrosshairTask: Task? - init( - _ preview: Preview, - startPosition: AVCaptureDevice.Position, - preferredFocusMode focusMode: AVCaptureDevice.FocusMode - ) { + var subjectAreaChangeMonitoringSubscription: Cancellable? + + init(_ preview: Preview, preferredFocusMode focusMode: AVCaptureDevice.FocusMode) { self.preview = preview - self.cameraPosition = startPosition self.preferredFocusMode = focusMode + self.subjectAreaChangeMonitoringSubscription = preview + .session + .getSubjectAreaDidChangePublisher() + .sink(receiveValue: { [self] _ in + self.resetFocusMode(to: focusMode) + }) } var session: AespaSession { @@ -89,12 +82,12 @@ public struct InteractivePreview: View { preview.previewLayer } - var currentFocusMode: AVCaptureDevice.FocusMode { - session.currentFocusMode ?? .continuousAutoFocus + var currentFocusMode: AVCaptureDevice.FocusMode? { + session.currentFocusMode } - var currentCameraPosition: AVCaptureDevice.Position { - session.currentCameraPosition ?? cameraPosition + var currentCameraPosition: AVCaptureDevice.Position? 
{ + session.currentCameraPosition } public var body: some View { @@ -109,7 +102,7 @@ public struct InteractivePreview: View { .stroke(lineWidth: 1) .foregroundColor(Color.yellow) .frame(width: 100, height: 100) - .position(tappedLocation) + .position(focusingLocation) .opacity(focusFrameOpacity) .animation(.spring(), value: focusFrameOpacity) } @@ -118,33 +111,32 @@ public struct InteractivePreview: View { private extension InteractivePreview { var changePositionGesture: some Gesture { - guard enableChangePosition else { + guard session.isRunning, enableChangePosition else { return TapGesture(count: 2).onEnded{} } return TapGesture(count: 2).onEnded { let nextPosition: AVCaptureDevice.Position = (currentCameraPosition == .back) ? .front : .back session.setPosition(to: nextPosition) - - cameraPosition = nextPosition } } var tapToFocusGesture: some Gesture { - guard enableFocus else { + guard session.isRunning, enableFocus else { return DragGesture(minimumDistance: 0).onEnded{ _ in } } return DragGesture(minimumDistance: 0) .onEnded { value in - guard enableFocus else { return } - -// guard currentFocusMode == .autoFocus || currentFocusMode == .continuousAutoFocus else { -// return -// } + guard + let currentFocusMode, + currentFocusMode == .autoFocus || currentFocusMode == .continuousAutoFocus + else { + return + } - session.setFocus(mode: .autoFocus, point: value.location) - tappedLocation = value.location + session.setFocus(mode: currentFocusMode, point: value.location) + focusingLocation = value.location if enableShowingCrosshair { showCrosshair() @@ -153,7 +145,7 @@ private extension InteractivePreview { } var pinchZoomGesture: some Gesture { - guard enableZoom else { + guard session.isRunning, enableZoom else { return MagnificationGesture().onChanged { _ in } .onEnded { _ in } } @@ -172,21 +164,27 @@ private extension InteractivePreview { } } + func resetFocusMode(to focusMode: AVCaptureDevice.FocusMode) { + guard session.isRunning else { return } + 
session.setFocus(mode: focusMode) + } + func showCrosshair() { guard enableShowingCrosshair else { return } // Cancel the previous task - focusingTask?.cancel() - - focusingTask = Task { + showingCrosshairTask?.cancel() + // Running a new task + showingCrosshairTask = Task { + // 10^9 nano seconds = 1 second + let second: UInt64 = 1_000_000_000 + withAnimation { focusFrameOpacity = 1 } - // Sleep for 2 seconds - try await Task.sleep(nanoseconds: 2 * 1_000_000_000) + try await Task.sleep(nanoseconds: 2 * second) withAnimation { focusFrameOpacity = 0.35 } - // Sleep for 3 more seconds - try await Task.sleep(nanoseconds: 3 * 1_000_000_000) + try await Task.sleep(nanoseconds: 3 * second) withAnimation { focusFrameOpacity = 0 } } } @@ -203,6 +201,11 @@ public extension InteractivePreview { return self } + func preferredFocusMode(_ mode: AVCaptureDevice.FocusMode) -> Self { + preferredFocusMode = mode + return self + } + func doubleTapToChangeCameraPosition(enabled: Bool) -> Self { enableChangePosition = enabled return self From cca85b516f6edae577b0990d7117c1b1563e1813 Mon Sep 17 00:00:00 2001 From: Young Bin Lee Date: Wed, 28 Jun 2023 14:03:28 +0900 Subject: [PATCH 15/21] Change method names --- Demo/Aespa-iOS/SettingView.swift | 6 +-- Demo/Aespa-iOS/VideoContentView.swift | 2 +- Demo/Aespa-iOS/VideoContentViewModel.swift | 12 ++--- README.md | 30 ++++++------- Sources/Aespa/AespaSession.swift | 22 +++++----- .../Core/Context/AespaPhotoContext.swift | 2 +- .../Core/Context/AespaVideoContext.swift | 6 +-- Sources/Aespa/Core/Context/Context.swift | 44 +++++++++---------- ...CaptureConnection+AespaRepresentable.swift | 8 ++-- .../AVCaptureDevice+AespaRepresentable.swift | 12 ++--- .../AespaCoreSession+AespaRepresentable.swift | 8 ++-- .../Connection/VideoOrientationTuner.swift | 2 +- .../Connection/VideoStabilizationTuner.swift | 2 +- Sources/Aespa/Tuner/Device/FocusTuner.swift | 2 +- Sources/Aespa/Tuner/Device/TorchTuner.swift | 2 +- 
Sources/Aespa/Tuner/Device/ZoomTuner.swift | 2 +- .../Tuner/Session/CameraPositionTuner.swift | 2 +- .../Aespa/Tuner/Session/QualityTuner.swift | 2 +- .../Util/Extension/SwiftUI+Extension.swift | 6 +-- Tests/Tests/Tuner/ConnectionTunerTests.swift | 8 ++-- Tests/Tests/Tuner/DeviceTunerTests.swift | 16 +++---- Tests/Tests/Tuner/SessionTunerTests.swift | 10 ++--- 22 files changed, 103 insertions(+), 103 deletions(-) diff --git a/Demo/Aespa-iOS/SettingView.swift b/Demo/Aespa-iOS/SettingView.swift index 02a1e0a..5e60a15 100644 --- a/Demo/Aespa-iOS/SettingView.swift +++ b/Demo/Aespa-iOS/SettingView.swift @@ -39,7 +39,7 @@ struct SettingView: View { } .modifier(TitledPicker(title: "Asset quality")) .onChange(of: quality) { newValue in - viewModel.aespaSession.setQuality(to: newValue) + viewModel.aespaSession.quality(to: newValue) } Picker("Focus", selection: $focusMode) { @@ -49,7 +49,7 @@ struct SettingView: View { } .modifier(TitledPicker(title: "Focus mode")) .onChange(of: focusMode) { newValue in - viewModel.aespaSession.setFocus(mode: newValue) + viewModel.aespaSession.focus(mode: newValue) } } @@ -74,7 +74,7 @@ struct SettingView: View { } .modifier(TitledPicker(title: "Flash mode")) .onChange(of: flashMode) { newValue in - viewModel.aespaSession.setFlashMode(to: newValue) + viewModel.aespaSession.flashMode(to: newValue) } } } diff --git a/Demo/Aespa-iOS/VideoContentView.swift b/Demo/Aespa-iOS/VideoContentView.swift index 397d084..5ecc24c 100644 --- a/Demo/Aespa-iOS/VideoContentView.swift +++ b/Demo/Aespa-iOS/VideoContentView.swift @@ -79,7 +79,7 @@ struct VideoContentView: View { // Position change + button Button(action: { - viewModel.aespaSession.setPosition(to: isFront ? .back : .front) + viewModel.aespaSession.position(to: isFront ? 
.back : .front) isFront.toggle() }) { Image(systemName: "arrow.triangle.2.circlepath.camera.fill") diff --git a/Demo/Aespa-iOS/VideoContentViewModel.swift b/Demo/Aespa-iOS/VideoContentViewModel.swift index 71bc98d..ac7ab47 100644 --- a/Demo/Aespa-iOS/VideoContentViewModel.swift +++ b/Demo/Aespa-iOS/VideoContentViewModel.swift @@ -37,21 +37,21 @@ class VideoContentViewModel: ObservableObject { // MARK: Settings should be done after `configure` // Common setting aespaSession - .setFocus(mode: .autoFocus) - .setChangeMonitoring(enabled: true) - .setOrientation(to: .portrait) - .setQuality(to: .high) + .focus(mode: .autoFocus) + .changeMonitoring(enabled: true) + .orientation(to: .portrait) + .quality(to: .high) .custom(WideColorCameraTuner()) // Photo-only setting aespaSession - .setFlashMode(to: .on) + .flashMode(to: .on) .redEyeReduction(enabled: true) // Video-only setting aespaSession .mute() - .setStabilization(mode: .auto) + .stabilization(mode: .auto) // Prepare video album cover aespaSession.videoFilePublisher diff --git a/README.md b/README.md index 84ae22f..58ab27b 100644 --- a/README.md +++ b/README.md @@ -141,10 +141,10 @@ One of our main feature, `InteractivePreview` provides a comprehensive and intui | Common | Description | |----------------------------------|------------------------------------------------------------------------------------------------------------------| | ✨ `zoom` | Modifies the zoom factor. | -| ✨ `setPosition` | Changes the camera position. | -| `setOrientation` | Modifies the orientation. | -| `setFocus` | Alters the autofocusing mode. | -| `setQuality` | Adjusts the video quality preset for the recording session. | +| ✨ `position` | Changes the camera position. | +| `orientation` | Modifies the orientation. | +| `focus` | Alters the autofocusing mode. | +| `quality` | Adjusts the video quality preset for the recording session. | | `doctor` | Checks if essential conditions to start recording are satisfied. 
| | `previewLayerPublisher` | Responsible for emitting updates to the preview layer. | @@ -154,8 +154,8 @@ One of our main feature, `InteractivePreview` provides a comprehensive and intui | ✨ `stopRecording` | Terminates the current video recording session and attempts to save the video file. | | `mute` | Mutes the audio input. | | `unmute` | Restores the audio input. | -| `setStabilization` | Alters the stabilization mode. | -| `setTorch` | Adjusts the torch mode and level. | +| `stabilization` | Alters the stabilization mode. | +| `torch` | Adjusts the torch mode and level. | | `customize` | Customizes the session with a specific tuning configuration. | | ✨ `fetchVideoFiles` | Fetches a list of recorded video files. | | `videoFilePublisher` | Emits a `Result` object containing a latest video file data. | @@ -163,7 +163,7 @@ One of our main feature, `InteractivePreview` provides a comprehensive and intui | Photo | Description | |----------------------------------|------------------------------------------------------------------------------------------------------------------| | ✨ `capturePhoto` | Capture a photo and returns a result image file. | -| ✨ `setFlashMode` | Sets the flash mode for the photo capture session. | +| ✨ `flashMode` | Sets the flash mode for the photo capture session. | | `redEyeReduction` | Enables or disables red-eye reduction for the photo capture session. | | `customize` | Customizes the photo capture session with a specific `AVCapturePhotoSettings`. | | ✨ `fetchPhotoFiles` | Fetches a list of captured photos files. 
| @@ -215,20 +215,20 @@ Task(priority: .background) { ``` Swift // Common setting aespaSession - .setAutofocusing(mode: .continuousAutoFocus) - .setOrientation(to: .portrait) - .setQuality(to: .high) + .autofocusing(mode: .continuousAutoFocus) + .orientation(to: .portrait) + .quality(to: .high) .customize(WideColorCameraTuner()) // Photo-only setting aespaSession - .setFlashMode(to: .on) + .flashMode(to: .on) .redEyeReduction(enabled: true) // Video-only setting aespaSession .mute() - .setStabilization(mode: .auto) + .stabilization(mode: .auto) ``` ### Recording & Capture @@ -281,9 +281,9 @@ class VideoContentViewModel: ObservableObject { do { try await Aespa.configure() aespaSession - .setAutofocusing(mode: .continuousAutoFocus) - .setOrientation(to: .portrait) - .setQuality(to: .high) + .autofocusing(mode: .continuousAutoFocus) + .orientation(to: .portrait) + .quality(to: .high) // Other settings ... diff --git a/Sources/Aespa/AespaSession.swift b/Sources/Aespa/AespaSession.swift index 57927d1..d811192 100644 --- a/Sources/Aespa/AespaSession.swift +++ b/Sources/Aespa/AespaSession.swift @@ -207,14 +207,14 @@ extension AespaSession: CommonContext { } @discardableResult - public func setQualityWithError(to preset: AVCaptureSession.Preset) throws -> AespaSession { + public func qualityWithError(to preset: AVCaptureSession.Preset) throws -> AespaSession { let tuner = QualityTuner(videoQuality: preset) try coreSession.run(tuner) return self } @discardableResult - public func setPositionWithError(to position: AVCaptureDevice.Position) throws -> AespaSession { + public func positionWithError(to position: AVCaptureDevice.Position) throws -> AespaSession { let tuner = CameraPositionTuner(position: position, devicePreference: option.session.cameraDevicePreference) try coreSession.run(tuner) @@ -222,14 +222,14 @@ extension AespaSession: CommonContext { } @discardableResult - public func setOrientationWithError(to orientation: AVCaptureVideoOrientation) throws -> AespaSession 
{ + public func orientationWithError(to orientation: AVCaptureVideoOrientation) throws -> AespaSession { let tuner = VideoOrientationTuner(orientation: orientation) try coreSession.run(tuner) return self } @discardableResult - public func setFocusWithError(mode: AVCaptureDevice.FocusMode, point: CGPoint? = nil) throws -> AespaSession { + public func focusWithError(mode: AVCaptureDevice.FocusMode, point: CGPoint? = nil) throws -> AespaSession { let tuner = FocusTuner(mode: mode, point: point) try coreSession.run(tuner) return self @@ -243,7 +243,7 @@ extension AespaSession: CommonContext { } @discardableResult - public func setChangeMonitoringWithError(enabled: Bool) throws -> AespaSession { + public func changeMonitoringWithError(enabled: Bool) throws -> AespaSession { let tuner = ChangeMonitoringTuner(isSubjectAreaChangeMonitoringEnabled: enabled) try coreSession.run(tuner) return self @@ -294,13 +294,13 @@ extension AespaSession: VideoContext { } @discardableResult - public func setStabilizationWithError(mode: AVCaptureVideoStabilizationMode) throws -> AespaVideoSessionContext { - try videoContext.setStabilizationWithError(mode: mode) + public func stabilizationWithError(mode: AVCaptureVideoStabilizationMode) throws -> AespaVideoSessionContext { + try videoContext.stabilizationWithError(mode: mode) } @discardableResult - public func setTorchWithError(mode: AVCaptureDevice.TorchMode, level: Float) throws -> AespaVideoSessionContext { - try videoContext.setTorchWithError(mode: mode, level: level) + public func torchWithError(mode: AVCaptureDevice.TorchMode, level: Float) throws -> AespaVideoSessionContext { + try videoContext.torchWithError(mode: mode, level: level) } public func fetchVideoFiles(limit: Int) -> [VideoFile] { @@ -326,8 +326,8 @@ extension AespaSession: PhotoContext { } @discardableResult - public func setFlashMode(to mode: AVCaptureDevice.FlashMode) -> AespaPhotoContext { - photoContext.setFlashMode(to: mode) + public func flashMode(to mode: 
AVCaptureDevice.FlashMode) -> AespaPhotoContext { + photoContext.flashMode(to: mode) } @discardableResult diff --git a/Sources/Aespa/Core/Context/AespaPhotoContext.swift b/Sources/Aespa/Core/Context/AespaPhotoContext.swift index e7c9d63..5ae561c 100644 --- a/Sources/Aespa/Core/Context/AespaPhotoContext.swift +++ b/Sources/Aespa/Core/Context/AespaPhotoContext.swift @@ -94,7 +94,7 @@ extension AespaPhotoContext: PhotoContext { } @discardableResult - public func setFlashMode(to mode: AVCaptureDevice.FlashMode) -> AespaPhotoContext { + public func flashMode(to mode: AVCaptureDevice.FlashMode) -> AespaPhotoContext { photoSetting.flashMode = mode return self } diff --git a/Sources/Aespa/Core/Context/AespaVideoContext.swift b/Sources/Aespa/Core/Context/AespaVideoContext.swift index e68f091..82da2bf 100644 --- a/Sources/Aespa/Core/Context/AespaVideoContext.swift +++ b/Sources/Aespa/Core/Context/AespaVideoContext.swift @@ -84,7 +84,7 @@ extension AespaVideoContext: VideoContext { extension: "mp4") if option.session.autoVideoOrientationEnabled { - try commonContext.setOrientationWithError(to: UIDevice.current.orientation.toVideoOrientation) + try commonContext.orientationWithError(to: UIDevice.current.orientation.toVideoOrientation) } try recorder.startRecording(in: filePath) @@ -117,14 +117,14 @@ extension AespaVideoContext: VideoContext { } @discardableResult - public func setStabilizationWithError(mode: AVCaptureVideoStabilizationMode) throws -> AespaVideoContext { + public func stabilizationWithError(mode: AVCaptureVideoStabilizationMode) throws -> AespaVideoContext { let tuner = VideoStabilizationTuner(stabilzationMode: mode) try coreSession.run(tuner) return self } @discardableResult - public func setTorchWithError(mode: AVCaptureDevice.TorchMode, level: Float) throws -> AespaVideoContext { + public func torchWithError(mode: AVCaptureDevice.TorchMode, level: Float) throws -> AespaVideoContext { let tuner = TorchTuner(level: level, torchMode: mode) try 
coreSession.run(tuner) return self diff --git a/Sources/Aespa/Core/Context/Context.swift b/Sources/Aespa/Core/Context/Context.swift index 02f488a..6853641 100644 --- a/Sources/Aespa/Core/Context/Context.swift +++ b/Sources/Aespa/Core/Context/Context.swift @@ -31,7 +31,7 @@ public protocol CommonContext { /// - Throws: `AespaError` if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func setQualityWithError(to preset: AVCaptureSession.Preset) throws -> CommonContextType + @discardableResult func qualityWithError(to preset: AVCaptureSession.Preset) throws -> CommonContextType /// Sets the camera position for the video recording session. /// @@ -42,7 +42,7 @@ public protocol CommonContext { /// - Throws: `AespaError` if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func setPositionWithError(to position: AVCaptureDevice.Position) throws -> CommonContextType + @discardableResult func positionWithError(to position: AVCaptureDevice.Position) throws -> CommonContextType /// Sets the orientation for the session. /// @@ -54,7 +54,7 @@ public protocol CommonContext { /// /// - Note: It sets the orientation of the video you are recording, /// not the orientation of the `AVCaptureVideoPreviewLayer`. - @discardableResult func setOrientationWithError( + @discardableResult func orientationWithError( to orientation: AVCaptureVideoOrientation ) throws -> CommonContextType @@ -65,7 +65,7 @@ public protocol CommonContext { /// - Throws: `AespaError` if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func setFocusWithError( + @discardableResult func focusWithError( mode: AVCaptureDevice.FocusMode, point: CGPoint? ) throws -> CommonContextType @@ -78,7 +78,7 @@ public protocol CommonContext { /// - Returns: `AespaVideoContext`, for chaining calls. 
@discardableResult func zoomWithError(factor: CGFloat) throws -> CommonContextType - @discardableResult func setChangeMonitoringWithError(enabled: Bool) throws -> CommonContextType + @discardableResult func changeMonitoringWithError(enabled: Bool) throws -> CommonContextType /// This function provides a way to use a custom tuner to modify the current session. /// The tuner must conform to `AespaSessionTuning`. @@ -103,12 +103,12 @@ extension CommonContext { /// /// - Returns: `AespaVideoContext`, for chaining calls. @discardableResult - public func setQuality( + public func quality( to preset: AVCaptureSession.Preset, errorHandler: ErrorHandler? = nil ) -> CommonContextType { do { - return try self.setQualityWithError(to: preset) + return try self.qualityWithError(to: preset) } catch let error { errorHandler?(error) Logger.log(error: error) // Logs any errors encountered during the operation @@ -125,12 +125,12 @@ extension CommonContext { /// /// - Returns: `AespaVideoContext`, for chaining calls. @discardableResult - public func setPosition( + public func position( to position: AVCaptureDevice.Position, errorHandler: ErrorHandler? = nil ) -> CommonContextType { do { - return try self.setPositionWithError(to: position) + return try self.positionWithError(to: position) } catch let error { errorHandler?(error) Logger.log(error: error) // Logs any errors encountered during the operation @@ -150,12 +150,12 @@ extension CommonContext { /// /// - Returns: `AespaVideoContext`, for chaining calls. @discardableResult - public func setOrientation( + public func orientation( to orientation: AVCaptureVideoOrientation, errorHandler: ErrorHandler? 
= nil ) -> CommonContextType { do { - return try self.setOrientationWithError(to: orientation) + return try self.orientationWithError(to: orientation) } catch let error { errorHandler?(error) Logger.log(error: error) // Logs any errors encountered during the operation @@ -172,13 +172,13 @@ extension CommonContext { /// /// - Returns: `AespaVideoContext`, for chaining calls. @discardableResult - public func setFocus( + public func focus( mode: AVCaptureDevice.FocusMode, point: CGPoint? = nil, errorHandler: ErrorHandler? = nil ) -> CommonContextType { do { - return try self.setFocusWithError(mode: mode, point: point) + return try self.focusWithError(mode: mode, point: point) } catch let error { errorHandler?(error) Logger.log(error: error) // Logs any errors encountered during the operation @@ -210,12 +210,12 @@ extension CommonContext { } @discardableResult - public func setChangeMonitoring( + public func changeMonitoring( enabled: Bool, errorHandler: ErrorHandler? = nil ) -> CommonContextType { do { - return try self.setChangeMonitoringWithError(enabled: enabled) + return try self.changeMonitoringWithError(enabled: enabled) } catch let error { errorHandler?(error) Logger.log(error: error) // Logs any errors encountered during the operation @@ -305,7 +305,7 @@ public protocol VideoContext { /// - Throws: `AespaError` if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func setStabilizationWithError(mode: AVCaptureVideoStabilizationMode) throws -> VideoContextType + @discardableResult func stabilizationWithError(mode: AVCaptureVideoStabilizationMode) throws -> VideoContextType /// Sets the torch mode and level for the video recording session. /// @@ -319,7 +319,7 @@ public protocol VideoContext { /// /// - Note: This function might throw an error if the torch mode is not supported, /// or the specified level is not within the acceptable range. 
- @discardableResult func setTorchWithError(mode: AVCaptureDevice.TorchMode, level: Float) throws -> VideoContextType + @discardableResult func torchWithError(mode: AVCaptureDevice.TorchMode, level: Float) throws -> VideoContextType /// Fetches a list of recorded video files. /// The number of files fetched is controlled by the limit parameter. @@ -420,12 +420,12 @@ extension VideoContext { /// /// - Returns: `AespaVideoContext`, for chaining calls. @discardableResult - public func setStabilization( + public func stabilization( mode: AVCaptureVideoStabilizationMode, errorHandler: ErrorHandler? = nil ) -> VideoContextType { do { - return try self.setStabilizationWithError(mode: mode) + return try self.stabilizationWithError(mode: mode) } catch let error { errorHandler?(error) Logger.log(error: error) // Logs any errors encountered during the operation @@ -447,13 +447,13 @@ extension VideoContext { /// - Note: This function might throw an error if the torch mode is not supported, /// or the specified level is not within the acceptable range. @discardableResult - public func setTorch( + public func torch( mode: AVCaptureDevice.TorchMode, level: Float, errorHandler: ErrorHandler? = nil ) -> VideoContextType { do { - return try self.setTorchWithError(mode: mode, level: level) + return try self.torchWithError(mode: mode, level: level) } catch let error { errorHandler?(error) Logger.log(error: error) // Logs any errors encountered during the operation @@ -513,7 +513,7 @@ public protocol PhotoContext { /// /// - Parameter mode: The `AVCaptureDevice.FlashMode` to set for the camera. /// - Returns: The updated `AespaPhotoContext` instance. - @discardableResult func setFlashMode(to mode: AVCaptureDevice.FlashMode) -> PhotoContextType + @discardableResult func flashMode(to mode: AVCaptureDevice.FlashMode) -> PhotoContextType /// Sets the red eye reduction mode for the camera and returns the updated `AespaPhotoContext` instance. 
/// The returned instance can be used for chaining configuration. diff --git a/Sources/Aespa/Core/Representable/AVCaptureConnection+AespaRepresentable.swift b/Sources/Aespa/Core/Representable/AVCaptureConnection+AespaRepresentable.swift index 7b04560..3eedc79 100644 --- a/Sources/Aespa/Core/Representable/AVCaptureConnection+AespaRepresentable.swift +++ b/Sources/Aespa/Core/Representable/AVCaptureConnection+AespaRepresentable.swift @@ -12,16 +12,16 @@ protocol AespaCaptureConnectionRepresentable { var videoOrientation: AVCaptureVideoOrientation { get set } var preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode { get set } - func setOrientation(to orientation: AVCaptureVideoOrientation) - func setStabilizationMode(to mode: AVCaptureVideoStabilizationMode) + func orientation(to orientation: AVCaptureVideoOrientation) + func stabilizationMode(to mode: AVCaptureVideoStabilizationMode) } extension AVCaptureConnection: AespaCaptureConnectionRepresentable { - func setOrientation(to orientation: AVCaptureVideoOrientation) { + func orientation(to orientation: AVCaptureVideoOrientation) { self.videoOrientation = orientation } - func setStabilizationMode(to mode: AVCaptureVideoStabilizationMode) { + func stabilizationMode(to mode: AVCaptureVideoStabilizationMode) { self.preferredVideoStabilizationMode = mode } } diff --git a/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift b/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift index 903579f..55bfda4 100644 --- a/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift +++ b/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift @@ -19,14 +19,14 @@ protocol AespaCaptureDeviceRepresentable: NSObject { func isFocusModeSupported(_ focusMode: AVCaptureDevice.FocusMode) -> Bool - func setZoomFactor(_ factor: CGFloat) - func setFocusMode(_ focusMode: AVCaptureDevice.FocusMode, point: CGPoint?) 
- func setTorchMode(_ torchMode: AVCaptureDevice.TorchMode) + func zoomFactor(_ factor: CGFloat) + func focusMode(_ focusMode: AVCaptureDevice.FocusMode, point: CGPoint?) + func torchMode(_ torchMode: AVCaptureDevice.TorchMode) func setTorchModeOn(level torchLevel: Float) throws } extension AVCaptureDevice: AespaCaptureDeviceRepresentable { - func setTorchMode(_ torchMode: TorchMode) { + func torchMode(_ torchMode: TorchMode) { switch torchMode { case .off: self.torchMode = .off @@ -39,14 +39,14 @@ extension AVCaptureDevice: AespaCaptureDeviceRepresentable { } } - func setFocusMode(_ focusMode: AVCaptureDevice.FocusMode, point: CGPoint?) { + func focusMode(_ focusMode: AVCaptureDevice.FocusMode, point: CGPoint?) { self.focusMode = focusMode if let point { self.focusPointOfInterest = point } } - func setZoomFactor(_ factor: CGFloat) { + func zoomFactor(_ factor: CGFloat) { self.videoZoomFactor = factor } diff --git a/Sources/Aespa/Core/Representable/AespaCoreSession+AespaRepresentable.swift b/Sources/Aespa/Core/Representable/AespaCoreSession+AespaRepresentable.swift index dddf3b3..98b199b 100644 --- a/Sources/Aespa/Core/Representable/AespaCoreSession+AespaRepresentable.swift +++ b/Sources/Aespa/Core/Representable/AespaCoreSession+AespaRepresentable.swift @@ -60,13 +60,13 @@ public protocol AespaCoreSessionRepresentable { /// Sets the position of the camera. /// Throws an error if the operation fails. - func setCameraPosition( + func cameraPosition( to position: AVCaptureDevice.Position, device deviceType: AVCaptureDevice.DeviceType? ) throws /// Sets the video quality preset. 
- func setVideoQuality(to preset: AVCaptureSession.Preset) throws + func videoQuality(to preset: AVCaptureSession.Preset) throws } extension AespaCoreSession: AespaCoreSessionRepresentable { @@ -187,7 +187,7 @@ extension AespaCoreSession: AespaCoreSessionRepresentable { } // MARK: - Option related - func setCameraPosition( + func cameraPosition( to position: AVCaptureDevice.Position, device deviceType: AVCaptureDevice.DeviceType? ) throws { @@ -217,7 +217,7 @@ extension AespaCoreSession: AespaCoreSessionRepresentable { } } - func setVideoQuality(to preset: AVCaptureSession.Preset) { + func videoQuality(to preset: AVCaptureSession.Preset) { let session = self session.sessionPreset = preset diff --git a/Sources/Aespa/Tuner/Connection/VideoOrientationTuner.swift b/Sources/Aespa/Tuner/Connection/VideoOrientationTuner.swift index e64c697..f38e25a 100644 --- a/Sources/Aespa/Tuner/Connection/VideoOrientationTuner.swift +++ b/Sources/Aespa/Tuner/Connection/VideoOrientationTuner.swift @@ -11,6 +11,6 @@ struct VideoOrientationTuner: AespaConnectionTuning { var orientation: AVCaptureVideoOrientation func tune(_ connection: T) throws { - connection.setOrientation(to: orientation) + connection.orientation(to: orientation) } } diff --git a/Sources/Aespa/Tuner/Connection/VideoStabilizationTuner.swift b/Sources/Aespa/Tuner/Connection/VideoStabilizationTuner.swift index ad555b2..5877f78 100644 --- a/Sources/Aespa/Tuner/Connection/VideoStabilizationTuner.swift +++ b/Sources/Aespa/Tuner/Connection/VideoStabilizationTuner.swift @@ -11,6 +11,6 @@ struct VideoStabilizationTuner: AespaConnectionTuning { var stabilzationMode: AVCaptureVideoStabilizationMode func tune(_ connection: T) { - connection.setStabilizationMode(to: stabilzationMode) + connection.stabilizationMode(to: stabilzationMode) } } diff --git a/Sources/Aespa/Tuner/Device/FocusTuner.swift b/Sources/Aespa/Tuner/Device/FocusTuner.swift index 4268f95..084a4c9 100644 --- a/Sources/Aespa/Tuner/Device/FocusTuner.swift +++ 
b/Sources/Aespa/Tuner/Device/FocusTuner.swift @@ -28,6 +28,6 @@ struct FocusTuner: AespaDeviceTuning { ) } - device.setFocusMode(mode, point: parsedPoint) + device.focusMode(mode, point: parsedPoint) } } diff --git a/Sources/Aespa/Tuner/Device/TorchTuner.swift b/Sources/Aespa/Tuner/Device/TorchTuner.swift index 7a2cac5..18b9001 100644 --- a/Sources/Aespa/Tuner/Device/TorchTuner.swift +++ b/Sources/Aespa/Tuner/Device/TorchTuner.swift @@ -17,7 +17,7 @@ struct TorchTuner: AespaDeviceTuning { throw AespaError.device(reason: .unsupported) } - device.setTorchMode(torchMode) + device.torchMode(torchMode) try device.setTorchModeOn(level: level) } } diff --git a/Sources/Aespa/Tuner/Device/ZoomTuner.swift b/Sources/Aespa/Tuner/Device/ZoomTuner.swift index 806c898..3177db2 100644 --- a/Sources/Aespa/Tuner/Device/ZoomTuner.swift +++ b/Sources/Aespa/Tuner/Device/ZoomTuner.swift @@ -12,6 +12,6 @@ struct ZoomTuner: AespaDeviceTuning { var zoomFactor: CGFloat func tune(_ device: T) { - device.setZoomFactor(zoomFactor) + device.zoomFactor(zoomFactor) } } diff --git a/Sources/Aespa/Tuner/Session/CameraPositionTuner.swift b/Sources/Aespa/Tuner/Session/CameraPositionTuner.swift index fd0da89..23237fc 100644 --- a/Sources/Aespa/Tuner/Session/CameraPositionTuner.swift +++ b/Sources/Aespa/Tuner/Session/CameraPositionTuner.swift @@ -18,6 +18,6 @@ struct CameraPositionTuner: AespaSessionTuning { } func tune(_ session: T) throws { - try session.setCameraPosition(to: position, device: devicePreference) + try session.cameraPosition(to: position, device: devicePreference) } } diff --git a/Sources/Aespa/Tuner/Session/QualityTuner.swift b/Sources/Aespa/Tuner/Session/QualityTuner.swift index dc85bc3..ad9622d 100644 --- a/Sources/Aespa/Tuner/Session/QualityTuner.swift +++ b/Sources/Aespa/Tuner/Session/QualityTuner.swift @@ -12,6 +12,6 @@ struct QualityTuner: AespaSessionTuning { var videoQuality: AVCaptureSession.Preset func tune(_ session: T) throws { - try session.setVideoQuality(to: 
self.videoQuality) + try session.videoQuality(to: self.videoQuality) } } diff --git a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift index ae23a8f..fe061fa 100644 --- a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift +++ b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift @@ -117,7 +117,7 @@ private extension InteractivePreview { return TapGesture(count: 2).onEnded { let nextPosition: AVCaptureDevice.Position = (currentCameraPosition == .back) ? .front : .back - session.setPosition(to: nextPosition) + session.position(to: nextPosition) } } @@ -135,7 +135,7 @@ private extension InteractivePreview { return } - session.setFocus(mode: currentFocusMode, point: value.location) + session.focus(mode: currentFocusMode, point: value.location) focusingLocation = value.location if enableShowingCrosshair { @@ -166,7 +166,7 @@ private extension InteractivePreview { func resetFocusMode(to focusMode: AVCaptureDevice.FocusMode) { guard session.isRunning else { return } - session.setFocus(mode: focusMode) + session.focus(mode: focusMode) } func showCrosshair() { diff --git a/Tests/Tests/Tuner/ConnectionTunerTests.swift b/Tests/Tests/Tuner/ConnectionTunerTests.swift index 90767ff..a6872c1 100644 --- a/Tests/Tests/Tuner/ConnectionTunerTests.swift +++ b/Tests/Tests/Tuner/ConnectionTunerTests.swift @@ -28,14 +28,14 @@ final class ConnectionTunerTests: XCTestCase { let tuner = VideoOrientationTuner(orientation: orientation) stub(connection) { proxy in - when(proxy.setOrientation(to: equal(to: orientation))).then { value in + when(proxy.orientation(to: equal(to: orientation))).then { value in when(proxy.videoOrientation.get).thenReturn(orientation) } } try tuner.tune(connection) verify(connection) - .setOrientation(to: equal(to: orientation)) + .orientation(to: equal(to: orientation)) .with(returnType: Void.self) XCTAssertEqual(connection.videoOrientation, orientation) @@ -46,14 +46,14 @@ final class 
ConnectionTunerTests: XCTestCase { let tuner = VideoStabilizationTuner(stabilzationMode: mode) stub(connection) { proxy in - when(proxy.setStabilizationMode(to: equal(to: mode))).then { value in + when(proxy.stabilizationMode(to: equal(to: mode))).then { value in when(proxy.preferredVideoStabilizationMode.get).thenReturn(mode) } } tuner.tune(connection) verify(connection) - .setStabilizationMode(to: equal(to: mode)) + .stabilizationMode(to: equal(to: mode)) .with(returnType: Void.self) XCTAssertEqual(connection.preferredVideoStabilizationMode, mode) diff --git a/Tests/Tests/Tuner/DeviceTunerTests.swift b/Tests/Tests/Tuner/DeviceTunerTests.swift index a01dd74..4b717df 100644 --- a/Tests/Tests/Tuner/DeviceTunerTests.swift +++ b/Tests/Tests/Tuner/DeviceTunerTests.swift @@ -30,14 +30,14 @@ final class DeviceTunerTests: XCTestCase { stub(device) { proxy in when(proxy.isFocusModeSupported(equal(to: mode))).thenReturn(true) - when(proxy.setFocusMode(equal(to: mode), point: equal(to: point))).then { mode in + when(proxy.focusMode(equal(to: mode), point: equal(to: point))).then { mode in when(proxy.focusMode.get).thenReturn(.locked) } } try tuner.tune(device) verify(device) - .setFocusMode(equal(to: mode), point: equal(to: point)) + .focusMode(equal(to: mode), point: equal(to: point)) .with(returnType: Void.self) XCTAssertEqual(device.focusMode, mode) @@ -48,14 +48,14 @@ final class DeviceTunerTests: XCTestCase { let tuner = ZoomTuner(zoomFactor: factor) stub(device) { proxy in - when(proxy.setZoomFactor(equal(to: factor))).then { factor in + when(proxy.zoomFactor(equal(to: factor))).then { factor in when(proxy.videoZoomFactor.get).thenReturn(factor) } } tuner.tune(device) verify(device) - .setZoomFactor(equal(to: factor)) + .zoomFactor(equal(to: factor)) .with(returnType: Void.self) XCTAssertEqual(device.videoZoomFactor, factor) @@ -68,17 +68,17 @@ final class DeviceTunerTests: XCTestCase { stub(device) { proxy in when(proxy.hasTorch.get).thenReturn(true) - 
when(proxy.setTorchMode(equal(to: mode))).thenDoNothing() - when(proxy.setTorchModeOn(level: level)).thenDoNothing() + when(proxy.torchMode(equal(to: mode))).thenDoNothing() + when(proxy.setTorchModeOn(level: level)).thenDoNothing() } try tuner.tune(device) verify(device) - .setTorchMode(equal(to: mode)) + .torchMode(equal(to: mode)) .with(returnType: Void.self) verify(device) - .setTorchModeOn(level: level) + .setTorchModeOn(level: level) .with(returnType: Void.self) } } diff --git a/Tests/Tests/Tuner/SessionTunerTests.swift b/Tests/Tests/Tuner/SessionTunerTests.swift index 6e1b708..c1eaac2 100644 --- a/Tests/Tests/Tuner/SessionTunerTests.swift +++ b/Tests/Tests/Tuner/SessionTunerTests.swift @@ -16,7 +16,7 @@ final class SessionTunerTests: XCTestCase { var mockSessionProtocol: MockAespaCoreSessionRepresentable! - override func setUpWithError() throws { + override func setUpWithError() throws { mockSessionProtocol = MockAespaCoreSessionRepresentable() } @@ -29,12 +29,12 @@ final class SessionTunerTests: XCTestCase { let tuner = QualityTuner(videoQuality: preset) stub(mockSessionProtocol) { proxy in - when(proxy.setVideoQuality(to: any())).thenDoNothing() + when(proxy.videoQuality(to: any())).thenDoNothing() } try tuner.tune(mockSessionProtocol) verify(mockSessionProtocol) - .setVideoQuality(to: equal(to: AVCaptureSession.Preset.cif352x288)) + .videoQuality(to: equal(to: AVCaptureSession.Preset.cif352x288)) .with(returnType: Void.self) } @@ -43,12 +43,12 @@ final class SessionTunerTests: XCTestCase { let tuner = CameraPositionTuner(position: position) stub(mockSessionProtocol) { proxy in - when(proxy.setCameraPosition(to: any(), device: any())).thenDoNothing() + when(proxy.cameraPosition(to: any(), device: any())).thenDoNothing() } try tuner.tune(mockSessionProtocol) verify(mockSessionProtocol) - .setCameraPosition(to: equal(to: AVCaptureDevice.Position.front), device: any()) + .cameraPosition(to: equal(to: AVCaptureDevice.Position.front), device: any()) .with(returnType:
Void.self) } From bfa15ff2f436737117282a56f1700c0fa2e62cbb Mon Sep 17 00:00:00 2001 From: Young Bin Lee Date: Wed, 28 Jun 2023 18:52:55 +0900 Subject: [PATCH 16/21] Add multi-thread support --- Demo/Aespa-iOS/VideoContentViewModel.swift | 92 +++++++++++----------- Sources/Aespa/AespaSession.swift | 1 + Sources/Aespa/Core/AespaCoreSession.swift | 29 +++++++ 3 files changed, 76 insertions(+), 46 deletions(-) diff --git a/Demo/Aespa-iOS/VideoContentViewModel.swift b/Demo/Aespa-iOS/VideoContentViewModel.swift index ac7ab47..c6779b3 100644 --- a/Demo/Aespa-iOS/VideoContentViewModel.swift +++ b/Demo/Aespa-iOS/VideoContentViewModel.swift @@ -30,55 +30,55 @@ class VideoContentViewModel: ObservableObject { let option = AespaOption(albumName: "Aespa-Demo") self.aespaSession = Aespa.session(with: option) + + // MARK: Settings should be done after `configure` + // Common setting + aespaSession + .focus(mode: .autoFocus) + .changeMonitoring(enabled: true) + .orientation(to: .portrait) + .quality(to: .high) + .custom(WideColorCameraTuner()) + + // Photo-only setting + aespaSession + .flashMode(to: .on) + .redEyeReduction(enabled: true) + + // Video-only setting + aespaSession + .mute() + .stabilization(mode: .auto) + + // Prepare video album cover + aespaSession.videoFilePublisher + .receive(on: DispatchQueue.main) + .map { result -> Image? in + if case .success(let file) = result { + return file.thumbnailImage + } else { + return nil + } + } + .assign(to: \.videoAlbumCover, on: self) + .store(in: &subscription) + + // Prepare photo album cover + aespaSession.photoFilePublisher + .receive(on: DispatchQueue.main) + .map { result -> Image? 
in + if case .success(let file) = result { + return file.thumbnailImage + } else { + return nil + } + } + .assign(to: \.photoAlbumCover, on: self) + .store(in: &subscription) + Task(priority: .background) { do { try await Aespa.configure() - - // MARK: Settings should be done after `configure` - // Common setting - aespaSession - .focus(mode: .autoFocus) - .changeMonitoring(enabled: true) - .orientation(to: .portrait) - .quality(to: .high) - .custom(WideColorCameraTuner()) - - // Photo-only setting - aespaSession - .flashMode(to: .on) - .redEyeReduction(enabled: true) - - // Video-only setting - aespaSession - .mute() - .stabilization(mode: .auto) - - // Prepare video album cover - aespaSession.videoFilePublisher - .receive(on: DispatchQueue.main) - .map { result -> Image? in - if case .success(let file) = result { - return file.thumbnailImage - } else { - return nil - } - } - .assign(to: \.videoAlbumCover, on: self) - .store(in: &subscription) - - // Prepare photo album cover - aespaSession.photoFilePublisher - .receive(on: DispatchQueue.main) - .map { result -> Image? 
in - if case .success(let file) = result { - return file.thumbnailImage - } else { - return nil - } - } - .assign(to: \.photoAlbumCover, on: self) - .store(in: &subscription) - } catch let error { print(error) } diff --git a/Sources/Aespa/AespaSession.swift b/Sources/Aespa/AespaSession.swift index d811192..22937fa 100644 --- a/Sources/Aespa/AespaSession.swift +++ b/Sources/Aespa/AespaSession.swift @@ -351,6 +351,7 @@ extension AespaSession: PhotoContext { extension AespaSession { func startSession() throws { let tuner = SessionLaunchTuner() + coreSession.start() try coreSession.run(tuner) previewLayerSubject.send(previewLayer) diff --git a/Sources/Aespa/Core/AespaCoreSession.swift b/Sources/Aespa/Core/AespaCoreSession.swift index 44ed487..ebe4dfb 100644 --- a/Sources/Aespa/Core/AespaCoreSession.swift +++ b/Sources/Aespa/Core/AespaCoreSession.swift @@ -12,12 +12,20 @@ import AVFoundation class AespaCoreSession: AVCaptureSession { var option: AespaOption + + private let lock = NSRecursiveLock() init(option: AespaOption) { self.option = option + lock.lock() } func run(_ tuner: T) throws { + lock.lock() + defer { + lock.unlock() + } + if tuner.needTransaction { self.beginConfiguration() } defer { if tuner.needTransaction { self.commitConfiguration() } @@ -27,6 +35,11 @@ class AespaCoreSession: AVCaptureSession { } func run(_ tuner: T) throws { + lock.lock() + defer { + lock.unlock() + } + guard let device = self.videoDeviceInput?.device else { throw AespaError.device(reason: .invalid) } @@ -40,6 +53,11 @@ class AespaCoreSession: AVCaptureSession { } func run(_ tuner: T) throws { + lock.lock() + defer { + lock.unlock() + } + guard let connection = self.connections.first else { throw AespaError.session(reason: .cannotFindConnection) } @@ -48,10 +66,21 @@ class AespaCoreSession: AVCaptureSession { } func run(_ processor: T) throws { + lock.lock() + defer { + lock.unlock() + } + guard let output = self.movieFileOutput else { throw AespaError.session(reason: 
.cannotFindConnection) } try processor.process(output) } + + func start() { + // Do soemthing + print("Start") + lock.unlock() + } } From 51d3201a439a774c3e84a18ed6e2309d24f23532 Mon Sep 17 00:00:00 2001 From: Young Bin Lee Date: Thu, 29 Jun 2023 11:51:42 +0900 Subject: [PATCH 17/21] Refactoring error throwing methods --- Demo/Aespa-iOS/VideoContentViewModel.swift | 98 ++-- Sources/Aespa/Aespa.swift | 27 +- Sources/Aespa/AespaSession.swift | 128 +++-- Sources/Aespa/Core/AespaCoreRecorder.swift | 21 +- Sources/Aespa/Core/AespaCoreSession.swift | 115 ++--- .../Core/Context/AespaPhotoContext.swift | 62 ++- .../Core/Context/AespaVideoContext.swift | 85 +-- Sources/Aespa/Core/Context/Context.swift | 488 +++--------------- ....swift => SessionConfigurationTuner.swift} | 4 +- .../Session/SessionTerminationTuner.swift | 2 + .../Util/Extension/SwiftUI+Extension.swift | 4 +- 11 files changed, 374 insertions(+), 660 deletions(-) rename Sources/Aespa/Tuner/Session/{SessionLaunchTuner.swift => SessionConfigurationTuner.swift} (76%) diff --git a/Demo/Aespa-iOS/VideoContentViewModel.swift b/Demo/Aespa-iOS/VideoContentViewModel.swift index c6779b3..801a952 100644 --- a/Demo/Aespa-iOS/VideoContentViewModel.swift +++ b/Demo/Aespa-iOS/VideoContentViewModel.swift @@ -30,58 +30,56 @@ class VideoContentViewModel: ObservableObject { let option = AespaOption(albumName: "Aespa-Demo") self.aespaSession = Aespa.session(with: option) - - // MARK: Settings should be done after `configure` - // Common setting - aespaSession - .focus(mode: .autoFocus) - .changeMonitoring(enabled: true) - .orientation(to: .portrait) - .quality(to: .high) - .custom(WideColorCameraTuner()) - - // Photo-only setting - aespaSession - .flashMode(to: .on) - .redEyeReduction(enabled: true) - - // Video-only setting - aespaSession - .mute() - .stabilization(mode: .auto) - - // Prepare video album cover - aespaSession.videoFilePublisher - .receive(on: DispatchQueue.main) - .map { result -> Image? 
in - if case .success(let file) = result { - return file.thumbnailImage - } else { - return nil + do { + // MARK: Settings should be done after `configure` + // Common setting + aespaSession + .focus(mode: .continuousAutoFocus) + .changeMonitoring(enabled: true) + .orientation(to: .portrait) + .quality(to: .high) + .custom(WideColorCameraTuner()) + + // Photo-only setting + aespaSession + .flashMode(to: .on) + .redEyeReduction(enabled: true) + + // Video-only setting + aespaSession + .mute() + .stabilization(mode: .auto) + + // Prepare video album cover + aespaSession.videoFilePublisher + .receive(on: DispatchQueue.main) + .map { result -> Image? in + if case .success(let file) = result { + return file.thumbnailImage + } else { + return nil + } } - } - .assign(to: \.videoAlbumCover, on: self) - .store(in: &subscription) - - // Prepare photo album cover - aespaSession.photoFilePublisher - .receive(on: DispatchQueue.main) - .map { result -> Image? in - if case .success(let file) = result { - return file.thumbnailImage - } else { - return nil + .assign(to: \.videoAlbumCover, on: self) + .store(in: &subscription) + + // Prepare photo album cover + aespaSession.photoFilePublisher + .receive(on: DispatchQueue.main) + .map { result -> Image? 
in + if case .success(let file) = result { + return file.thumbnailImage + } else { + return nil + } } - } - .assign(to: \.photoAlbumCover, on: self) - .store(in: &subscription) - - Task(priority: .background) { - do { - try await Aespa.configure() - } catch let error { - print(error) - } + .assign(to: \.photoAlbumCover, on: self) + .store(in: &subscription) + + try Aespa.configure() + + } catch let error { + print(error) } } diff --git a/Sources/Aespa/Aespa.swift b/Sources/Aespa/Aespa.swift index eef26e6..0e15bad 100644 --- a/Sources/Aespa/Aespa.swift +++ b/Sources/Aespa/Aespa.swift @@ -36,32 +36,33 @@ open class Aespa { /// /// - Warning: This method is synchronous and blocks until the session starts running or it fails, /// which it reports by posting an `AVCaptureSessionRuntimeError` notification. - public static func configure() async throws { - guard let core = core else { + public static func configure(_ errorHandler: @escaping ErrorHandler = { _ in }) throws { + guard let core else { throw AespaError.session(reason: .notConfigured) } - guard - case .permitted = await AuthorizationChecker.checkCaptureAuthorizationStatus() - else { - throw AespaError.permission(reason: .denied) - } - - try core.startSession() + Task { + guard + case .permitted = await AuthorizationChecker.checkCaptureAuthorizationStatus() + else { + throw AespaError.permission(reason: .denied) + } - Logger.log(message: "Session is configured successfully") + Task.detached(priority: .background) { + core.startSession(errorHandler) + } + } } /// Terminates the current `AespaSession`. /// /// If a session has been started, it stops the session and releases resources. /// After termination, a new session needs to be configured to start recording again. 
- public static func terminate() throws { + public static func terminate(_ errorHandler: @escaping ErrorHandler = { _ in }) throws { guard let core = core else { return } - try core.terminateSession() - Logger.log(message: "Session is terminated successfully") + core.terminateSession(errorHandler) } } diff --git a/Sources/Aespa/AespaSession.swift b/Sources/Aespa/AespaSession.swift index 22937fa..7ac9cec 100644 --- a/Sources/Aespa/AespaSession.swift +++ b/Sources/Aespa/AespaSession.swift @@ -159,10 +159,7 @@ open class AespaSession { public func getSubjectAreaDidChangePublisher() -> AnyPublisher { if isSubjectAreaChangeMonitoringEnabled != true { Logger.log( - message: """ - `isSubjectAreaChangeMonitoringEnabled` is not set `true. - `AVCaptureDeviceSubjectAreaDidChange` publisher may not publish anything. - """) + message: "`isSubjectAreaChangeMonitoringEnabled` is not set `true`. `AVCaptureDeviceSubjectAreaDidChange` publisher may not publish anything.") } return NotificationCenter.default @@ -207,103 +204,103 @@ extension AespaSession: CommonContext { } @discardableResult - public func qualityWithError(to preset: AVCaptureSession.Preset) throws -> AespaSession { + public func quality(to preset: AVCaptureSession.Preset, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaSession { let tuner = QualityTuner(videoQuality: preset) - try coreSession.run(tuner) + coreSession.run(tuner, errorHandler) return self } - + @discardableResult - public func positionWithError(to position: AVCaptureDevice.Position) throws -> AespaSession { + public func position(to position: AVCaptureDevice.Position, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaSession { let tuner = CameraPositionTuner(position: position, devicePreference: option.session.cameraDevicePreference) - try coreSession.run(tuner) + coreSession.run(tuner, errorHandler) return self } - + @discardableResult - public func orientationWithError(to orientation: AVCaptureVideoOrientation) throws -> 
AespaSession { + public func orientation(to orientation: AVCaptureVideoOrientation, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaSession { let tuner = VideoOrientationTuner(orientation: orientation) - try coreSession.run(tuner) + coreSession.run(tuner, errorHandler) return self } - + @discardableResult - public func focusWithError(mode: AVCaptureDevice.FocusMode, point: CGPoint? = nil) throws -> AespaSession { + public func focus(mode: AVCaptureDevice.FocusMode, point: CGPoint? = nil, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaSession { let tuner = FocusTuner(mode: mode, point: point) - try coreSession.run(tuner) + coreSession.run(tuner, errorHandler) return self } - + @discardableResult - public func zoomWithError(factor: CGFloat) throws -> AespaSession { + public func zoom(factor: CGFloat, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaSession { let tuner = ZoomTuner(zoomFactor: factor) - try coreSession.run(tuner) + coreSession.run(tuner, errorHandler) return self } - + @discardableResult - public func changeMonitoringWithError(enabled: Bool) throws -> AespaSession { + public func changeMonitoring(enabled: Bool, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaSession { let tuner = ChangeMonitoringTuner(isSubjectAreaChangeMonitoringEnabled: enabled) - try coreSession.run(tuner) + coreSession.run(tuner, errorHandler) return self } - - public func customizeWithError(_ tuner: T) throws -> AespaSession { - try coreSession.run(tuner) + + @discardableResult + public func custom(_ tuner: T, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaSession { + coreSession.run(tuner, errorHandler) return self } } extension AespaSession: VideoContext { public typealias AespaVideoSessionContext = AespaVideoContext - + public var underlyingVideoContext: AespaVideoSessionContext { videoContext } - + public var videoFilePublisher: AnyPublisher, Never> { videoContext.videoFilePublisher } - + public var isRecording: Bool { 
videoContext.isRecording } - + public var isMuted: Bool { videoContext.isMuted } - - public func startRecordingWithError() throws { - try videoContext.startRecordingWithError() + + public func startRecording(_ errorHandler: @escaping ErrorHandler = { _ in }) { + videoContext.startRecording(errorHandler) } - @discardableResult - public func stopRecordingWithError() async throws -> VideoFile { - try await videoContext.stopRecordingWithError() + public func stopRecording(_ completionHandler: @escaping (Result) -> Void = { _ in }) { + videoContext.stopRecording(completionHandler) } - + @discardableResult - public func muteWithError() throws -> AespaVideoSessionContext { - try videoContext.muteWithError() + public func mute(_ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoSessionContext { + videoContext.mute(errorHandler) } - + @discardableResult - public func unmuteWithError() throws -> AespaVideoSessionContext { - try videoContext.unmuteWithError() + public func unmute(_ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoSessionContext { + videoContext.unmute(errorHandler) } - + @discardableResult - public func stabilizationWithError(mode: AVCaptureVideoStabilizationMode) throws -> AespaVideoSessionContext { - try videoContext.stabilizationWithError(mode: mode) + public func stabilization(mode: AVCaptureVideoStabilizationMode, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoSessionContext { + videoContext.stabilization(mode: mode, errorHandler) } - + @discardableResult - public func torchWithError(mode: AVCaptureDevice.TorchMode, level: Float) throws -> AespaVideoSessionContext { - try videoContext.torchWithError(mode: mode, level: level) + public func torch(mode: AVCaptureDevice.TorchMode, level: Float, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoSessionContext { + videoContext.torch(mode: mode, level: level, errorHandler) } - - public func fetchVideoFiles(limit: Int) -> [VideoFile] { + + public func 
fetchVideoFiles(limit: Int = 0) -> [VideoFile] { videoContext.fetchVideoFiles(limit: limit) } } @@ -321,10 +318,11 @@ extension AespaSession: PhotoContext { photoContext.currentSetting } - public func capturePhotoWithError() async throws -> PhotoFile { - try await photoContext.capturePhotoWithError() - } + public func capturePhoto(_ completionHandler: @escaping (Result) -> Void = { _ in }) { + photoContext.capturePhoto(completionHandler) + } + @discardableResult public func flashMode(to mode: AVCaptureDevice.FlashMode) -> AespaPhotoContext { photoContext.flashMode(to: mode) @@ -334,31 +332,27 @@ extension AespaSession: PhotoContext { public func redEyeReduction(enabled: Bool) -> AespaPhotoContext { photoContext.redEyeReduction(enabled: enabled) } - - public func custom(_ setting: AVCapturePhotoSettings) { - photoSetting = setting - } - - public func fetchPhotoFiles(limit: Int) -> [PhotoFile] { - photoContext.fetchPhotoFiles(limit: limit) - } + @discardableResult public func custom(_ setting: AVCapturePhotoSettings) -> AespaPhotoContext { photoContext.custom(setting) } + + public func fetchPhotoFiles(limit: Int = 0) -> [PhotoFile] { + photoContext.fetchPhotoFiles(limit: limit) + } } extension AespaSession { - func startSession() throws { - let tuner = SessionLaunchTuner() - coreSession.start() - try coreSession.run(tuner) - + func startSession(_ errorHandler: @escaping ErrorHandler) { + let tuner = SessionConfigurationTuner() + coreSession.run(tuner, errorHandler) + previewLayerSubject.send(previewLayer) } - - func terminateSession() throws { + + func terminateSession(_ errorHandler: @escaping ErrorHandler) { let tuner = SessionTerminationTuner() - try coreSession.run(tuner) + coreSession.run(tuner, errorHandler) } } diff --git a/Sources/Aespa/Core/AespaCoreRecorder.swift b/Sources/Aespa/Core/AespaCoreRecorder.swift index bf1c2b9..aac6e03 100644 --- a/Sources/Aespa/Core/AespaCoreRecorder.swift +++ b/Sources/Aespa/Core/AespaCoreRecorder.swift @@ -21,23 +21,28 @@ 
class AespaCoreRecorder: NSObject { self.core = core } - func run(processor: T) throws { + func run(processor: T, _ errorHandler: @escaping ErrorHandler) { guard let output = core.movieFileOutput else { - throw AespaError.session(reason: .cannotFindConnection) + errorHandler(AespaError.session(reason: .cannotFindConnection)) + return } - try processor.process(output) + do { + try processor.process(output) + } catch { + errorHandler(error) + } } } extension AespaCoreRecorder { - func startRecording(in filePath: URL) throws { - try run(processor: StartRecordProcessor(filePath: filePath, delegate: self)) + func startRecording(in filePath: URL, _ errorHandler: @escaping ErrorHandler) { + run(processor: StartRecordProcessor(filePath: filePath, delegate: self), errorHandler) } - + func stopRecording() async throws -> URL { - try run(processor: FinishRecordProcessor()) - + run(processor: FinishRecordProcessor(), { _ in }) + return try await withCheckedThrowingContinuation { continuation in fileIOResultSubsciption = fileIOResultSubject.sink { _ in // Do nothing on completion; we're only interested in values. 
diff --git a/Sources/Aespa/Core/AespaCoreSession.swift b/Sources/Aespa/Core/AespaCoreSession.swift index ebe4dfb..773e96e 100644 --- a/Sources/Aespa/Core/AespaCoreSession.swift +++ b/Sources/Aespa/Core/AespaCoreSession.swift @@ -13,74 +13,71 @@ import AVFoundation class AespaCoreSession: AVCaptureSession { var option: AespaOption - private let lock = NSRecursiveLock() - + var workQueue = DispatchQueue(label: "coreSession.workQueue", qos: .background) + init(option: AespaOption) { self.option = option - lock.lock() } - - func run(_ tuner: T) throws { - lock.lock() - defer { - lock.unlock() - } - - if tuner.needTransaction { self.beginConfiguration() } - defer { - if tuner.needTransaction { self.commitConfiguration() } - } - - try tuner.tune(self) - } - - func run(_ tuner: T) throws { - lock.lock() - defer { - lock.unlock() - } - - guard let device = self.videoDeviceInput?.device else { - throw AespaError.device(reason: .invalid) - } - - if tuner.needLock { try device.lockForConfiguration() } - defer { - if tuner.needLock { device.unlockForConfiguration() } + + func run(_ tuner: T, _ errorHandler: @escaping ErrorHandler) { + workQueue.async { + do { + if tuner.needTransaction { self.beginConfiguration() } + defer { + if tuner.needTransaction { self.commitConfiguration() } + } + + try tuner.tune(self) + } catch let error { + errorHandler(error) + } } - - try tuner.tune(device) } - - func run(_ tuner: T) throws { - lock.lock() - defer { - lock.unlock() - } - - guard let connection = self.connections.first else { - throw AespaError.session(reason: .cannotFindConnection) + + func run(_ tuner: T, _ errorHandler: @escaping ErrorHandler) { + workQueue.async { + do { + guard let device = self.videoDeviceInput?.device else { + throw AespaError.device(reason: .invalid) + } + + if tuner.needLock { try device.lockForConfiguration() } + defer { + if tuner.needLock { device.unlockForConfiguration() } + } + + try tuner.tune(device) + } catch let error { + errorHandler(error) + } } 
- - try tuner.tune(connection) } - - func run(_ processor: T) throws { - lock.lock() - defer { - lock.unlock() - } - - guard let output = self.movieFileOutput else { - throw AespaError.session(reason: .cannotFindConnection) + + func run(_ tuner: T, _ errorHandler: @escaping ErrorHandler) { + workQueue.async { + do { + guard let connection = self.connections.first else { + throw AespaError.session(reason: .cannotFindConnection) + } + + try tuner.tune(connection) + } catch let error { + errorHandler(error) + } } - - try processor.process(output) } - func start() { - // Do soemthing - print("Start") - lock.unlock() + func run(_ processor: T, _ errorHandler: @escaping ErrorHandler) { + workQueue.async { + do { + guard let output = self.movieFileOutput else { + throw AespaError.session(reason: .cannotFindConnection) + } + + try processor.process(output) + } catch let error { + errorHandler(error) + } + } } } diff --git a/Sources/Aespa/Core/Context/AespaPhotoContext.swift b/Sources/Aespa/Core/Context/AespaPhotoContext.swift index 5ae561c..6b66bd5 100644 --- a/Sources/Aespa/Core/Context/AespaPhotoContext.swift +++ b/Sources/Aespa/Core/Context/AespaPhotoContext.swift @@ -67,30 +67,18 @@ extension AespaPhotoContext: PhotoContext { photoSetting } - public func capturePhotoWithError() async throws -> PhotoFile { - let setting = AVCapturePhotoSettings(from: photoSetting) - let rawPhotoAsset = try await camera.capture(setting: setting) - - guard let rawPhotoData = rawPhotoAsset.fileDataRepresentation() else { - throw AespaError.file(reason: .unableToFlatten) + public func capturePhoto( + _ completionHandler: @escaping (Result) -> Void + ) { + Task(priority: .utility) { + do { + let photoFile = try await self.capturePhotoWithError() + completionHandler(.success(photoFile)) + } catch let error { + Logger.log(error: error) + completionHandler(.failure(error)) + } } - - let filePath = try FilePathProvider.requestFilePath( - from: fileManager.systemFileManager, - directoryName: 
option.asset.albumName, - subDirectoryName: option.asset.photoDirectoryName, - fileName: option.asset.fileNameHandler()) - - try fileManager.write(data: rawPhotoData, to: filePath) - try await albumManager.addToAlbum(imageData: rawPhotoData) - - let photoFile = PhotoFileGenerator.generate( - with: filePath, - date: Date()) - - photoFileBufferSubject.send(.success(photoFile)) - - return photoFile } @discardableResult @@ -117,3 +105,31 @@ extension AespaPhotoContext: PhotoContext { count: limit) } } + +private extension AespaPhotoContext { + func capturePhotoWithError() async throws -> PhotoFile { + let setting = AVCapturePhotoSettings(from: photoSetting) + let rawPhotoAsset = try await camera.capture(setting: setting) + + guard let rawPhotoData = rawPhotoAsset.fileDataRepresentation() else { + throw AespaError.file(reason: .unableToFlatten) + } + + let filePath = try FilePathProvider.requestFilePath( + from: fileManager.systemFileManager, + directoryName: option.asset.albumName, + subDirectoryName: option.asset.photoDirectoryName, + fileName: option.asset.fileNameHandler()) + + try fileManager.write(data: rawPhotoData, to: filePath) + try await albumManager.addToAlbum(imageData: rawPhotoData) + + let photoFile = PhotoFileGenerator.generate( + with: filePath, + date: Date()) + + photoFileBufferSubject.send(.success(photoFile)) + + return photoFile + } +} diff --git a/Sources/Aespa/Core/Context/AespaVideoContext.swift b/Sources/Aespa/Core/Context/AespaVideoContext.swift index 82da2bf..ba35afe 100644 --- a/Sources/Aespa/Core/Context/AespaVideoContext.swift +++ b/Sources/Aespa/Core/Context/AespaVideoContext.swift @@ -60,10 +60,11 @@ extension AespaVideoContext: VideoContext { public var underlyingVideoContext: AespaVideoContext { self } - + public var isMuted: Bool { coreSession.audioDeviceInput == nil } + public var videoFilePublisher: AnyPublisher, Never> { videoFileBufferSubject.handleEvents(receiveOutput: { status in if case .failure(let error) = status { @@ -74,68 
+75,84 @@ extension AespaVideoContext: VideoContext { .eraseToAnyPublisher() } - public func startRecordingWithError() throws { - let fileName = option.asset.fileNameHandler() - let filePath = try FilePathProvider.requestFilePath( - from: fileManager.systemFileManager, - directoryName: option.asset.albumName, - subDirectoryName: option.asset.videoDirectoryName, - fileName: fileName, - extension: "mp4") - - if option.session.autoVideoOrientationEnabled { - try commonContext.orientationWithError(to: UIDevice.current.orientation.toVideoOrientation) + public func startRecording(_ errorHandler: @escaping ErrorHandler = { _ in }) { + do { + let fileName = option.asset.fileNameHandler() + let filePath = try FilePathProvider.requestFilePath( + from: fileManager.systemFileManager, + directoryName: option.asset.albumName, + subDirectoryName: option.asset.videoDirectoryName, + fileName: fileName, + extension: "mp4") + + if option.session.autoVideoOrientationEnabled { + commonContext.orientation(to: UIDevice.current.orientation.toVideoOrientation, errorHandler) + } + + recorder.startRecording(in: filePath, errorHandler) + isRecording = true + } catch let error { + errorHandler(error) } - - try recorder.startRecording(in: filePath) - isRecording = true } - public func stopRecordingWithError() async throws -> VideoFile { - let videoFilePath = try await recorder.stopRecording() - let videoFile = VideoFileGenerator.generate(with: videoFilePath, date: Date()) + public func stopRecording(_ completionHandler: @escaping (Result) -> Void = { _ in }) { + Task(priority: .utility) { + do { + let videoFilePath = try await recorder.stopRecording() + let videoFile = VideoFileGenerator.generate(with: videoFilePath, date: Date()) - try await albumManager.addToAlbum(filePath: videoFilePath) - videoFileBufferSubject.send(.success(videoFile)) + try await albumManager.addToAlbum(filePath: videoFilePath) + videoFileBufferSubject.send(.success(videoFile)) - isRecording = false - return videoFile + 
isRecording = false + completionHandler(.success(videoFile)) + } catch let error { + Logger.log(error: error) + completionHandler(.failure(error)) + } + } } @discardableResult - public func muteWithError() throws -> AespaVideoContext { + public func mute(_ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoContext { let tuner = AudioTuner(isMuted: true) - try coreSession.run(tuner) + coreSession.run(tuner, errorHandler) + return self } @discardableResult - public func unmuteWithError() throws -> AespaVideoContext { + public func unmute(_ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoContext { let tuner = AudioTuner(isMuted: false) - try coreSession.run(tuner) + coreSession.run(tuner, errorHandler) + return self } @discardableResult - public func stabilizationWithError(mode: AVCaptureVideoStabilizationMode) throws -> AespaVideoContext { + public func stabilization(mode: AVCaptureVideoStabilizationMode, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoContext { let tuner = VideoStabilizationTuner(stabilzationMode: mode) - try coreSession.run(tuner) + coreSession.run(tuner, errorHandler) + return self } @discardableResult - public func torchWithError(mode: AVCaptureDevice.TorchMode, level: Float) throws -> AespaVideoContext { + public func torch(mode: AVCaptureDevice.TorchMode, level: Float, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoContext { let tuner = TorchTuner(level: level, torchMode: mode) - try coreSession.run(tuner) + coreSession.run(tuner, errorHandler) + return self } - public func customizewWithError(_ tuner: T) throws -> AespaVideoContext { - try coreSession.run(tuner) + public func customize(_ tuner: T, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoContext { + coreSession.run(tuner, errorHandler) + return self } - - public func fetchVideoFiles(limit: Int) -> [VideoFile] { + + public func fetchVideoFiles(limit: Int = 0) -> [VideoFile] { return fileManager.fetchVideo( 
albumName: option.asset.albumName, subDirectoryName: option.asset.videoDirectoryName, diff --git a/Sources/Aespa/Core/Context/Context.swift b/Sources/Aespa/Core/Context/Context.swift index 6853641..d20bd91 100644 --- a/Sources/Aespa/Core/Context/Context.swift +++ b/Sources/Aespa/Core/Context/Context.swift @@ -13,233 +13,84 @@ import AVFoundation /// public typealias ErrorHandler = (Error) -> Void + /// A protocol that defines the common behaviors and properties that all context types must implement. /// /// It includes methods to control the quality, position, orientation, and auto-focusing behavior /// of the session. It also includes the ability to adjust the zoom level of the session. public protocol CommonContext { - /// associatedtype CommonContextType: CommonContext & VideoContext & PhotoContext - - /// + var underlyingCommonContext: CommonContextType { get } - + /// Sets the quality preset for the video recording session. /// - /// - Parameter preset: An `AVCaptureSession.Preset` value indicating the quality preset to be set. - /// - /// - Throws: `AespaError` if the session fails to run the tuner. + /// - Parameters: + /// - preset: An `AVCaptureSession.Preset` value indicating the quality preset to be set. + /// - errorHandler: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func qualityWithError(to preset: AVCaptureSession.Preset) throws -> CommonContextType - + @discardableResult func quality(to preset: AVCaptureSession.Preset, _ errorHandler: @escaping ErrorHandler) -> CommonContextType + /// Sets the camera position for the video recording session. /// /// It refers to `AespaOption.Session.cameraDevicePreference` when choosing the camera device. /// - /// - Parameter position: An `AVCaptureDevice.Position` value indicating the camera position to be set. - /// - /// - Throws: `AespaError` if the session fails to run the tuner. 
+ /// - Parameters: + /// - position: An `AVCaptureDevice.Position` value indicating the camera position to be set. + /// - errorHandler: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func positionWithError(to position: AVCaptureDevice.Position) throws -> CommonContextType - + @discardableResult func position(to position: AVCaptureDevice.Position, _ errorHandler: @escaping ErrorHandler) -> CommonContextType + /// Sets the orientation for the session. /// - /// - Parameter orientation: An `AVCaptureVideoOrientation` value indicating the orientation to be set. - /// - /// - Throws: `AespaError` if the session fails to run the tuner. + /// - Parameters: + /// - orientation: An `AVCaptureVideoOrientation` value indicating the orientation to be set. + /// - errorHandler: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. /// - /// - Note: It sets the orientation of the video you are recording, - /// not the orientation of the `AVCaptureVideoPreviewLayer`. - @discardableResult func orientationWithError( - to orientation: AVCaptureVideoOrientation - ) throws -> CommonContextType - + /// - Note: It sets the orientation of the video you are recording, not the orientation of the `AVCaptureVideoPreviewLayer`. + @discardableResult func orientation(to orientation: AVCaptureVideoOrientation, _ errorHandler: @escaping ErrorHandler) -> CommonContextType + /// Sets the autofocusing mode for the video recording session. /// - /// - Parameter mode: The focus mode(`AVCaptureDevice.FocusMode`) for the session. - /// - /// - Throws: `AespaError` if the session fails to run the tuner. + /// - Parameters: + /// - mode: The focus mode(`AVCaptureDevice.FocusMode`) for the session. + /// - point: The point in the camera's field of view that the auto focus should prioritize. 
+ /// - errorHandler: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func focusWithError( - mode: AVCaptureDevice.FocusMode, point: CGPoint? - ) throws -> CommonContextType - + @discardableResult func focus(mode: AVCaptureDevice.FocusMode, point: CGPoint?, _ errorHandler: @escaping ErrorHandler) -> CommonContextType + /// Sets the zoom factor for the video recording session. /// - /// - Parameter factor: A `CGFloat` value indicating the zoom factor to be set. - /// - /// - Throws: `AespaError` if the session fails to run the tuner. + /// - Parameters: + /// - factor: A `CGFloat` value indicating the zoom factor to be set. + /// - errorHandler: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func zoomWithError(factor: CGFloat) throws -> CommonContextType - - @discardableResult func changeMonitoringWithError(enabled: Bool) throws -> CommonContextType - - /// This function provides a way to use a custom tuner to modify the current session. - /// The tuner must conform to `AespaSessionTuning`. - /// - /// - Parameter tuner: An instance that conforms to `AespaSessionTuning`. - /// - Throws: If the session fails to run the tuner. - @discardableResult func customizeWithError(_ tuner: T) throws -> CommonContextType -} + @discardableResult func zoom(factor: CGFloat, _ errorHandler: @escaping ErrorHandler) -> CommonContextType -// MARK: Non-throwing methods -// These methods encapsulate error handling within the method itself rather than propagating it to the caller. -// This means any errors that occur during the execution of these methods will be caught and logged, not thrown. -// Although it simplifies error handling, this approach may not be recommended -// because it offers less control to callers. 
-// Developers are encouraged to use methods that throw errors, to gain finer control over error handling. -extension CommonContext { - /// Sets the quality preset for the video recording session. - /// - /// - Parameter preset: An `AVCaptureSession.Preset` value indicating the quality preset to be set. - /// - /// If an error occurs during the operation, the error is logged. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult - public func quality( - to preset: AVCaptureSession.Preset, - errorHandler: ErrorHandler? = nil - ) -> CommonContextType { - do { - return try self.qualityWithError(to: preset) - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingCommonContext - } - - /// Sets the camera position for the video recording session. - /// - /// - Parameter position: An `AVCaptureDevice.Position` value indicating the camera position to be set. + /// Changes monitoring status. /// - /// If an error occurs during the operation, the error is logged. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult - public func position( - to position: AVCaptureDevice.Position, - errorHandler: ErrorHandler? = nil - ) -> CommonContextType { - do { - return try self.positionWithError(to: position) - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingCommonContext - } - - /// Sets the orientation for the session. - /// - /// - Parameter orientation: An `AVCaptureVideoOrientation` value indicating the orientation to be set. - /// - /// If an error occurs during the operation, the error is logged. - /// - /// - Note: It sets the orientation of the video you are recording, - /// not the orientation of the `AVCaptureVideoPreviewLayer`. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. 
- @discardableResult - public func orientation( - to orientation: AVCaptureVideoOrientation, - errorHandler: ErrorHandler? = nil - ) -> CommonContextType { - do { - return try self.orientationWithError(to: orientation) - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingCommonContext - } - - /// Sets the autofocusing mode for the video recording session. - /// - /// - Parameter mode: The focus mode for the capture device. - /// - /// If an error occurs during the operation, the error is logged. + /// - Parameters: + /// - enabled: A boolean value to set monitoring status. + /// - errorHandler: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult - public func focus( - mode: AVCaptureDevice.FocusMode, - point: CGPoint? = nil, - errorHandler: ErrorHandler? = nil - ) -> CommonContextType { - do { - return try self.focusWithError(mode: mode, point: point) - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingCommonContext - } - - /// Sets the zoom factor for the video recording session. - /// - /// - Parameter factor: A `CGFloat` value indicating the zoom factor to be set. + @discardableResult func changeMonitoring(enabled: Bool, _ errorHandler: @escaping ErrorHandler) -> CommonContextType + + /// This function provides a way to use a custom tuner to modify the current session. + /// The tuner must conform to `AespaSessionTuning`. /// - /// If an error occurs during the operation, the error is logged. + /// - Parameters: + /// - tuner: An instance that conforms to `AespaSessionTuning`. + /// - errorHandler: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. 
- @discardableResult - public func zoom( - factor: CGFloat, - errorHandler: ErrorHandler? = nil - ) -> CommonContextType { - do { - return try self.zoomWithError(factor: factor) - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingCommonContext - } - - @discardableResult - public func changeMonitoring( - enabled: Bool, - errorHandler: ErrorHandler? = nil - ) -> CommonContextType { - do { - return try self.changeMonitoringWithError(enabled: enabled) - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingCommonContext - } - - - - @discardableResult - public func custom( - _ tuner: T, - errorHandler: ErrorHandler? = nil - ) -> CommonContextType { - do { - return try self.customizeWithError(tuner) - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingCommonContext - } + @discardableResult func custom(_ tuner: T, _ errorHandler: @escaping ErrorHandler) -> CommonContextType } /// A protocol that defines the behaviors and properties specific to the video context. @@ -248,11 +99,9 @@ extension CommonContext { /// the session is currently recording or muted, and controlling video recording, /// stabilization, torch mode, and fetching recorded video files. public protocol VideoContext { - /// associatedtype VideoContextType: VideoContext - /// var underlyingVideoContext: VideoContextType { get } - + /// A Boolean value that indicates whether the session is currently recording video. var isRecording: Bool { get } @@ -263,216 +112,72 @@ public protocol VideoContext { /// /// - Returns: `VideoFile` wrapped in a `Result` type. var videoFilePublisher: AnyPublisher, Never> { get } - + /// This property reflects the current state of audio input. 
/// /// If it returns `true`, the audio input is currently muted. var isMuted: Bool { get } - - /// - Throws: `AespaError` if the video file path request fails, - /// orientation setting fails, or starting the recording fails. - /// - /// - Note: If `autoVideoOrientation` option is enabled, - /// it sets the orientation according to the current device orientation. - func startRecordingWithError() throws - - /// Stops the ongoing video recording session and attempts to add the video file to the album. - /// - /// Supporting `async`, you can use this method in Swift Concurrency's context - /// - /// - Throws: `AespaError` if stopping the recording fails. - @discardableResult func stopRecordingWithError() async throws -> VideoFile - - /// Mutes the audio input for the video recording session. - /// - /// - Throws: `AespaError` if the session fails to run the tuner. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func muteWithError() throws -> VideoContextType - - /// Unmutes the audio input for the video recording session. - /// - /// - Throws: `AespaError` if the session fails to run the tuner. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func unmuteWithError() throws -> VideoContextType - - /// Sets the stabilization mode for the video recording session. - /// - /// - Parameter mode: An `AVCaptureVideoStabilizationMode` value - /// indicating the stabilization mode to be set. - /// - /// - Throws: `AespaError` if the session fails to run the tuner. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func stabilizationWithError(mode: AVCaptureVideoStabilizationMode) throws -> VideoContextType - - /// Sets the torch mode and level for the video recording session. - /// - /// - Parameters: - /// - mode: The desired torch mode (AVCaptureDevice.TorchMode). - /// - level: The desired torch level as a Float between 0.0 and 1.0. 
- /// - /// - Returns: Returns self, allowing additional settings to be configured. - /// - /// - Throws: Throws an error if setting the torch mode or level fails. - /// - /// - Note: This function might throw an error if the torch mode is not supported, - /// or the specified level is not within the acceptable range. - @discardableResult func torchWithError(mode: AVCaptureDevice.TorchMode, level: Float) throws -> VideoContextType - - /// Fetches a list of recorded video files. - /// The number of files fetched is controlled by the limit parameter. - /// - /// It is recommended not to be called in main thread. + + /// Starts the video recording session. /// - /// - Parameter limit: An integer specifying the maximum number of video files to fetch. + /// - Parameter errorHandler: A closure to handle any errors that occur during recording. /// - /// - Returns: An array of `VideoFile` instances. - func fetchVideoFiles(limit: Int) -> [VideoFile] -} + /// - Note: If `autoVideoOrientation` option is enabled, + /// it sets the orientation according to the current device orientation. + func startRecording(_ errorHandler: @escaping ErrorHandler) -// MARK: Non-throwing methods -// These methods encapsulate error handling within the method itself rather than propagating it to the caller. -// This means any errors that occur during the execution of these methods will be caught and logged, not thrown. -// Although it simplifies error handling, this approach may not be recommended -// because it offers less control to callers. -// Developers are encouraged to use methods that throw errors, to gain finer control over error handling. -extension VideoContext { - /// Starts the recording of a video session. - /// - /// If an error occurs during the operation, the error is logged. - /// - /// - Note: If auto video orientation is enabled, - /// it sets the orientation according to the current device orientation. - public func startRecording(errorHandler: ErrorHandler? 
= nil) { - do { - try startRecordingWithError() - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - } - - /// Stops the current video recording session and attempts to save the video file to the album. - /// - /// Any errors that occur during the process are captured and logged. - /// - /// - Parameter completionHandler: A closure that handles the result of the operation. - /// It's called with a `Result` object that encapsulates either a `VideoFile` instance. - /// - /// - Note: It is recommended to use the ``stopRecording() async throws`` - /// for more straightforward error handling. - public func stopRecording( - _ completionHandler: @escaping (Result) -> Void = { _ in } - ) { - Task(priority: .utility) { - do { - let videoFile = try await self.stopRecordingWithError() - return completionHandler(.success(videoFile)) - } catch let error { - Logger.log(error: error) - return completionHandler(.failure(error)) - } - } - } + func stopRecording(_ completionHandler: @escaping (Result) -> Void) /// Mutes the audio input for the video recording session. /// - /// If an error occurs during the operation, the error is logged. + /// - Parameter errorHandler: A closure to handle any errors that occur when muting the audio. /// - /// - Returns: `AespaVideoContext`, for chaining calls. + /// - Returns: The modified `VideoContextType` for chaining calls. @discardableResult - public func mute(errorHandler: ErrorHandler? = nil) -> VideoContextType { - do { - return try self.muteWithError() - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingVideoContext - } - + func mute(_ errorHandler: @escaping ErrorHandler) -> VideoContextType + /// Unmutes the audio input for the video recording session. /// - /// If an error occurs during the operation, the error is logged. 
+ /// - Parameter errorHandler: A closure to handle any errors that occur when unmuting the audio. /// - /// - Returns: `AespaVideoContext`, for chaining calls. + /// - Returns: The modified `VideoContextType` for chaining calls. @discardableResult - public func unmute(errorHandler: ErrorHandler? = nil) -> VideoContextType { - do { - return try self.unmuteWithError() - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation + func unmute(_ errorHandler: @escaping ErrorHandler) -> VideoContextType - return underlyingVideoContext - } - } - /// Sets the stabilization mode for the video recording session. /// - /// - Parameter mode: An `AVCaptureVideoStabilizationMode` value - /// indicating the stabilization mode to be set. - /// - /// If an error occurs during the operation, the error is logged. + /// - Parameters: + /// - mode: An `AVCaptureVideoStabilizationMode` value indicating the stabilization mode to be set. + /// - errorHandler: A closure to handle any errors that occur when setting the stabilization mode. /// - /// - Returns: `AespaVideoContext`, for chaining calls. + /// - Returns: The modified `VideoContextType` for chaining calls. @discardableResult - public func stabilization( - mode: AVCaptureVideoStabilizationMode, - errorHandler: ErrorHandler? = nil - ) -> VideoContextType { - do { - return try self.stabilizationWithError(mode: mode) - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } + func stabilization(mode: AVCaptureVideoStabilizationMode, _ errorHandler: @escaping ErrorHandler) -> VideoContextType - return underlyingVideoContext - } - /// Sets the torch mode and level for the video recording session. /// - /// If an error occurs during the operation, the error is logged. - /// /// - Parameters: - /// - mode: The desired torch mode (AVCaptureDevice.TorchMode). 
- /// - level: The desired torch level as a Float between 0.0 and 1.0. + /// - mode: The desired torch mode (AVCaptureDevice.TorchMode). + /// - level: The desired torch level as a Float between 0.0 and 1.0. /// /// - Returns: Returns self, allowing additional settings to be configured. /// /// - Note: This function might throw an error if the torch mode is not supported, - /// or the specified level is not within the acceptable range. + /// or the specified level is not within the acceptable range. @discardableResult - public func torch( - mode: AVCaptureDevice.TorchMode, - level: Float, - errorHandler: ErrorHandler? = nil - ) -> VideoContextType { - do { - return try self.torchWithError(mode: mode, level: level) - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingVideoContext - } + func torch(mode: AVCaptureDevice.TorchMode, level: Float, _ errorHandler: @escaping ErrorHandler) -> VideoContextType /// Fetches a list of recorded video files. /// The number of files fetched is controlled by the limit parameter. /// - /// It is recommended not to be called in main thread. + /// It is recommended not to be called in the main thread. + /// + /// - Parameters: + /// - limit: An integer specifying the maximum number of video files to fetch. + /// Fetch all files if `limit` is zero(`0`) /// - /// - Parameter limit: An integer specifying the maximum number of video files to fetch. - /// If the limit is set to 0 (default), all recorded video files will be fetched. /// - Returns: An array of `VideoFile` instances. - public func fetchVideoFiles(limit: Int = 0) -> [VideoFile] { - fetchVideoFiles(limit: limit) - } + func fetchVideoFiles(limit: Int) -> [VideoFile] } /// A protocol that defines the behaviors and properties specific to the photo context. 
@@ -495,18 +200,22 @@ public protocol PhotoContext { /// A variable holding current `AVCapturePhotoSettings` var currentSetting: AVCapturePhotoSettings { get } - /// Asynchronously captures a photo with the specified `AVCapturePhotoSettings`. + /// Asynchronously captures a photo using the specified `AVCapturePhotoSettings`. /// - /// The captured photo is flattened into a `Data` object, and then added to an album. A `PhotoFile` - /// object is then created using the raw photo data and the current date. This `PhotoFile` is sent - /// through the `photoFileBufferSubject` and then returned to the caller. + /// If the photo capture is successful, it will return a `PhotoFile` + /// object through the provided completion handler. /// - /// If any part of this process fails, an `AespaError` is thrown. + /// In case of an error during the photo capture process, the error will be logged and also returned via + /// the completion handler. /// - /// - Returns: A `PhotoFile` object representing the captured photo. - /// - Throws: An `AespaError` if there is an issue capturing the photo, - /// flattening it into a `Data` object, or adding it to the album. - @discardableResult func capturePhotoWithError() async throws -> PhotoFile + /// - Parameters: + /// - completionHandler: A closure to be invoked once the photo capture process is completed. This + /// closure takes a `Result` type where `Success` contains a `PhotoFile` object and + /// `Failure` contains an `Error` object. By default, the closure does nothing. + /// + func capturePhoto( + _ completionHandler: @escaping (Result) -> Void + ) /// Sets the flash mode for the camera and returns the updated `AespaPhotoContext` instance. /// The returned instance can be used for chaining configuration. @@ -550,33 +259,6 @@ public protocol PhotoContext { // it offers less control to callers. // Developers are encouraged to use methods that throw errors, to gain finer control over error handling. 
extension PhotoContext { - /// Asynchronously captures a photo using the specified `AVCapturePhotoSettings`. - /// - /// If the photo capture is successful, it will return a `PhotoFile` - /// object through the provided completion handler. - /// - /// In case of an error during the photo capture process, the error will be logged and also returned via - /// the completion handler. - /// - /// - Parameters: - /// - completionHandler: A closure to be invoked once the photo capture process is completed. This - /// closure takes a `Result` type where `Success` contains a `PhotoFile` object and - /// `Failure` contains an `Error` object. By default, the closure does nothing. - /// - public func capturePhoto( - _ completionHandler: @escaping (Result) -> Void = { _ in } - ) { - Task(priority: .utility) { - do { - let photoFile = try await self.capturePhotoWithError() - return completionHandler(.success(photoFile)) - } catch let error { - Logger.log(error: error) - return completionHandler(.failure(error)) - } - } - } - /// Fetches a list of captured photo files. /// The number of files fetched is controlled by the limit parameter. 
/// diff --git a/Sources/Aespa/Tuner/Session/SessionLaunchTuner.swift b/Sources/Aespa/Tuner/Session/SessionConfigurationTuner.swift similarity index 76% rename from Sources/Aespa/Tuner/Session/SessionLaunchTuner.swift rename to Sources/Aespa/Tuner/Session/SessionConfigurationTuner.swift index 7fc5934..3536b17 100644 --- a/Sources/Aespa/Tuner/Session/SessionLaunchTuner.swift +++ b/Sources/Aespa/Tuner/Session/SessionConfigurationTuner.swift @@ -7,7 +7,7 @@ import AVFoundation -struct SessionLaunchTuner: AespaSessionTuning { +struct SessionConfigurationTuner: AespaSessionTuning { let needTransaction = false func tune(_ session: T) throws { @@ -17,5 +17,7 @@ struct SessionLaunchTuner: AespaSessionTuning { try session.addMovieFileOutput() try session.addCapturePhotoOutput() session.startRunning() + + Logger.log(message: "Session is configured successfully") } } diff --git a/Sources/Aespa/Tuner/Session/SessionTerminationTuner.swift b/Sources/Aespa/Tuner/Session/SessionTerminationTuner.swift index bfdf531..8d5400e 100644 --- a/Sources/Aespa/Tuner/Session/SessionTerminationTuner.swift +++ b/Sources/Aespa/Tuner/Session/SessionTerminationTuner.swift @@ -16,5 +16,7 @@ struct SessionTerminationTuner: AespaSessionTuning { session.removeAudioInput() session.removeMovieInput() session.stopRunning() + + Logger.log(message: "Session is terminated successfully") } } diff --git a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift index fe061fa..76a5883 100644 --- a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift +++ b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift @@ -135,7 +135,7 @@ private extension InteractivePreview { return } - session.focus(mode: currentFocusMode, point: value.location) +// session.focus(mode: currentFocusMode, point: value.location) focusingLocation = value.location if enableShowingCrosshair { @@ -166,7 +166,7 @@ private extension InteractivePreview { func resetFocusMode(to focusMode: 
AVCaptureDevice.FocusMode) { guard session.isRunning else { return } - session.focus(mode: focusMode) +// session.focus(mode: focusMode) } func showCrosshair() { From 57e653164a1ae35c186df0e9bf7139a2bfea76e1 Mon Sep 17 00:00:00 2001 From: Young Bin Lee Date: Thu, 29 Jun 2023 14:09:03 +0900 Subject: [PATCH 18/21] Refactor unused codes --- Demo/Aespa-iOS/VideoContentViewModel.swift | 90 +++++++++---------- Sources/Aespa/Aespa.swift | 45 ++++------ Sources/Aespa/AespaSession.swift | 64 ++++++------- Sources/Aespa/Core/AespaCoreRecorder.swift | 11 +-- Sources/Aespa/Core/AespaCoreSession.swift | 57 +++++++++--- .../Core/Context/AespaVideoContext.swift | 34 +++---- Sources/Aespa/Core/Context/Context.swift | 50 +++++------ .../Session/SessionConfigurationTuner.swift | 23 ----- .../Util/Extension/SwiftUI+Extension.swift | 4 +- Sources/Aespa/Util/Log/Logger.swift | 3 +- 10 files changed, 184 insertions(+), 197 deletions(-) delete mode 100644 Sources/Aespa/Tuner/Session/SessionConfigurationTuner.swift diff --git a/Demo/Aespa-iOS/VideoContentViewModel.swift b/Demo/Aespa-iOS/VideoContentViewModel.swift index 801a952..96f61bb 100644 --- a/Demo/Aespa-iOS/VideoContentViewModel.swift +++ b/Demo/Aespa-iOS/VideoContentViewModel.swift @@ -30,57 +30,49 @@ class VideoContentViewModel: ObservableObject { let option = AespaOption(albumName: "Aespa-Demo") self.aespaSession = Aespa.session(with: option) - do { - // MARK: Settings should be done after `configure` - // Common setting - aespaSession - .focus(mode: .continuousAutoFocus) - .changeMonitoring(enabled: true) - .orientation(to: .portrait) - .quality(to: .high) - .custom(WideColorCameraTuner()) - - // Photo-only setting - aespaSession - .flashMode(to: .on) - .redEyeReduction(enabled: true) - - // Video-only setting - aespaSession - .mute() - .stabilization(mode: .auto) - - // Prepare video album cover - aespaSession.videoFilePublisher - .receive(on: DispatchQueue.main) - .map { result -> Image? 
in - if case .success(let file) = result { - return file.thumbnailImage - } else { - return nil - } + // Common setting + aespaSession + .focus(mode: .continuousAutoFocus) + .changeMonitoring(enabled: true) + .orientation(to: .portrait) + .quality(to: .high) + .custom(WideColorCameraTuner()) + + // Photo-only setting + aespaSession + .flashMode(to: .on) + .redEyeReduction(enabled: true) + + // Video-only setting + aespaSession + .mute() + .stabilization(mode: .auto) + + // Prepare video album cover + aespaSession.videoFilePublisher + .receive(on: DispatchQueue.main) + .map { result -> Image? in + if case .success(let file) = result { + return file.thumbnailImage + } else { + return nil } - .assign(to: \.videoAlbumCover, on: self) - .store(in: &subscription) - - // Prepare photo album cover - aespaSession.photoFilePublisher - .receive(on: DispatchQueue.main) - .map { result -> Image? in - if case .success(let file) = result { - return file.thumbnailImage - } else { - return nil - } + } + .assign(to: \.videoAlbumCover, on: self) + .store(in: &subscription) + + // Prepare photo album cover + aespaSession.photoFilePublisher + .receive(on: DispatchQueue.main) + .map { result -> Image? in + if case .success(let file) = result { + return file.thumbnailImage + } else { + return nil } - .assign(to: \.photoAlbumCover, on: self) - .store(in: &subscription) - - try Aespa.configure() - - } catch let error { - print(error) - } + } + .assign(to: \.photoAlbumCover, on: self) + .store(in: &subscription) } func fetchVideoFiles() { diff --git a/Sources/Aespa/Aespa.swift b/Sources/Aespa/Aespa.swift index 0e15bad..bf3d4ba 100644 --- a/Sources/Aespa/Aespa.swift +++ b/Sources/Aespa/Aespa.swift @@ -15,54 +15,41 @@ open class Aespa { /// - Parameters: /// - option: The `AespaOption` to configure the session. /// - Returns: The newly created `AespaSession`. 
- public static func session(with option: AespaOption) -> AespaSession { + public static func session( + with option: AespaOption, + onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaSession { + if let core { return core } + let newCore = AespaSession(option: option) - core = newCore - // Check logging option Logger.enableLogging = option.log.loggingEnabled - - return newCore - } - - /// Configures the `AespaSession` for recording. - /// Call this method to start the flow of data from the capture session’s inputs to its outputs. - /// - /// This method ensures that necessary permissions are granted - /// and the session is properly configured before starting. - /// If either the session isn't configured or the necessary permissions aren't granted, - /// it throws an error. - /// - /// - Warning: This method is synchronous and blocks until the session starts running or it fails, - /// which it reports by posting an `AVCaptureSessionRuntimeError` notification. - public static func configure(_ errorHandler: @escaping ErrorHandler = { _ in }) throws { - guard let core else { - throw AespaError.session(reason: .notConfigured) - } - + + // Configure session now Task { guard case .permitted = await AuthorizationChecker.checkCaptureAuthorizationStatus() else { throw AespaError.permission(reason: .denied) } - - Task.detached(priority: .background) { - core.startSession(errorHandler) - } + + newCore.startSession(onComplete) } + + core = newCore + return newCore } - + /// Terminates the current `AespaSession`. /// /// If a session has been started, it stops the session and releases resources. /// After termination, a new session needs to be configured to start recording again. 
- public static func terminate(_ errorHandler: @escaping ErrorHandler = { _ in }) throws { + public static func terminate(_ onComplete: @escaping CompletionHandler = { _ in }) throws { guard let core = core else { return } - core.terminateSession(errorHandler) + core.terminateSession(onComplete) } } diff --git a/Sources/Aespa/AespaSession.swift b/Sources/Aespa/AespaSession.swift index 7ac9cec..6dd83e9 100644 --- a/Sources/Aespa/AespaSession.swift +++ b/Sources/Aespa/AespaSession.swift @@ -204,51 +204,51 @@ extension AespaSession: CommonContext { } @discardableResult - public func quality(to preset: AVCaptureSession.Preset, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaSession { + public func quality(to preset: AVCaptureSession.Preset, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaSession { let tuner = QualityTuner(videoQuality: preset) - coreSession.run(tuner, errorHandler) + coreSession.run(tuner, onComplete) return self } @discardableResult - public func position(to position: AVCaptureDevice.Position, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaSession { + public func position(to position: AVCaptureDevice.Position, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaSession { let tuner = CameraPositionTuner(position: position, devicePreference: option.session.cameraDevicePreference) - coreSession.run(tuner, errorHandler) + coreSession.run(tuner, onComplete) return self } @discardableResult - public func orientation(to orientation: AVCaptureVideoOrientation, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaSession { + public func orientation(to orientation: AVCaptureVideoOrientation, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaSession { let tuner = VideoOrientationTuner(orientation: orientation) - coreSession.run(tuner, errorHandler) + coreSession.run(tuner, onComplete) return self } @discardableResult - public func focus(mode: AVCaptureDevice.FocusMode, point: CGPoint? 
= nil, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaSession { + public func focus(mode: AVCaptureDevice.FocusMode, point: CGPoint? = nil, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaSession { let tuner = FocusTuner(mode: mode, point: point) - coreSession.run(tuner, errorHandler) + coreSession.run(tuner, onComplete) return self } @discardableResult - public func zoom(factor: CGFloat, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaSession { + public func zoom(factor: CGFloat, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaSession { let tuner = ZoomTuner(zoomFactor: factor) - coreSession.run(tuner, errorHandler) + coreSession.run(tuner, onComplete) return self } @discardableResult - public func changeMonitoring(enabled: Bool, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaSession { + public func changeMonitoring(enabled: Bool, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaSession { let tuner = ChangeMonitoringTuner(isSubjectAreaChangeMonitoringEnabled: enabled) - coreSession.run(tuner, errorHandler) + coreSession.run(tuner, onComplete) return self } @discardableResult - public func custom(_ tuner: T, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaSession { - coreSession.run(tuner, errorHandler) + public func custom(_ tuner: T, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaSession { + coreSession.run(tuner, onComplete) return self } } @@ -272,8 +272,8 @@ extension AespaSession: VideoContext { videoContext.isMuted } - public func startRecording(_ errorHandler: @escaping ErrorHandler = { _ in }) { - videoContext.startRecording(errorHandler) + public func startRecording(_ onComplete: @escaping CompletionHandler = { _ in }) { + videoContext.startRecording(onComplete) } public func stopRecording(_ completionHandler: @escaping (Result) -> Void = { _ in }) { @@ -281,23 +281,23 @@ extension AespaSession: VideoContext { } @discardableResult - public func mute(_ 
errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoSessionContext { - videoContext.mute(errorHandler) + public func mute(_ onComplete: @escaping CompletionHandler = { _ in }) -> AespaVideoSessionContext { + videoContext.mute(onComplete) } @discardableResult - public func unmute(_ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoSessionContext { - videoContext.unmute(errorHandler) + public func unmute(_ onComplete: @escaping CompletionHandler = { _ in }) -> AespaVideoSessionContext { + videoContext.unmute(onComplete) } @discardableResult - public func stabilization(mode: AVCaptureVideoStabilizationMode, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoSessionContext { - videoContext.stabilization(mode: mode, errorHandler) + public func stabilization(mode: AVCaptureVideoStabilizationMode, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaVideoSessionContext { + videoContext.stabilization(mode: mode, onComplete) } @discardableResult - public func torch(mode: AVCaptureDevice.TorchMode, level: Float, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoSessionContext { - videoContext.torch(mode: mode, level: level, errorHandler) + public func torch(mode: AVCaptureDevice.TorchMode, level: Float, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaVideoSessionContext { + videoContext.torch(mode: mode, level: level, onComplete) } public func fetchVideoFiles(limit: Int = 0) -> [VideoFile] { @@ -344,15 +344,17 @@ extension AespaSession: PhotoContext { } extension AespaSession { - func startSession(_ errorHandler: @escaping ErrorHandler) { - let tuner = SessionConfigurationTuner() - coreSession.run(tuner, errorHandler) - - previewLayerSubject.send(previewLayer) + func startSession(_ onComplete: @escaping CompletionHandler) { + do { + try coreSession.start() + previewLayerSubject.send(previewLayer) + } catch let error { + onComplete(.failure(error)) + } } - func terminateSession(_ errorHandler: @escaping 
ErrorHandler) { + func terminateSession(_ onComplete: @escaping CompletionHandler) { let tuner = SessionTerminationTuner() - coreSession.run(tuner, errorHandler) + coreSession.run(tuner, onComplete) } } diff --git a/Sources/Aespa/Core/AespaCoreRecorder.swift b/Sources/Aespa/Core/AespaCoreRecorder.swift index aac6e03..82391ac 100644 --- a/Sources/Aespa/Core/AespaCoreRecorder.swift +++ b/Sources/Aespa/Core/AespaCoreRecorder.swift @@ -21,23 +21,24 @@ class AespaCoreRecorder: NSObject { self.core = core } - func run(processor: T, _ errorHandler: @escaping ErrorHandler) { + func run(processor: T, _ onComplete: @escaping CompletionHandler) { guard let output = core.movieFileOutput else { - errorHandler(AespaError.session(reason: .cannotFindConnection)) + onComplete(.failure(AespaError.session(reason: .cannotFindConnection))) return } do { try processor.process(output) + onComplete(.success(())) } catch { - errorHandler(error) + onComplete(.failure(error)) } } } extension AespaCoreRecorder { - func startRecording(in filePath: URL, _ errorHandler: @escaping ErrorHandler) { - run(processor: StartRecordProcessor(filePath: filePath, delegate: self), errorHandler) + func startRecording(in filePath: URL, _ onComplete: @escaping CompletionHandler) { + run(processor: StartRecordProcessor(filePath: filePath, delegate: self), onComplete) } func stopRecording() async throws -> URL { diff --git a/Sources/Aespa/Core/AespaCoreSession.swift b/Sources/Aespa/Core/AespaCoreSession.swift index 773e96e..f879bae 100644 --- a/Sources/Aespa/Core/AespaCoreSession.swift +++ b/Sources/Aespa/Core/AespaCoreSession.swift @@ -12,30 +12,36 @@ import AVFoundation class AespaCoreSession: AVCaptureSession { var option: AespaOption - - var workQueue = DispatchQueue(label: "coreSession.workQueue", qos: .background) + private var workQueue = OperationQueue() init(option: AespaOption) { self.option = option + + workQueue.qualityOfService = .background + workQueue.maxConcurrentOperationCount = 1 + 
workQueue.isSuspended = true } - func run(_ tuner: T, _ errorHandler: @escaping ErrorHandler) { - workQueue.async { + func run(_ tuner: T, _ onComplete: @escaping CompletionHandler) { + workQueue.addOperation { do { if tuner.needTransaction { self.beginConfiguration() } defer { if tuner.needTransaction { self.commitConfiguration() } + onComplete(.success(())) } try tuner.tune(self) } catch let error { - errorHandler(error) + print(tuner) + Logger.log(error: error) + onComplete(.failure(error)) } } } - func run(_ tuner: T, _ errorHandler: @escaping ErrorHandler) { - workQueue.async { + func run(_ tuner: T, _ onComplete: @escaping CompletionHandler) { + workQueue.addOperation { do { guard let device = self.videoDeviceInput?.device else { throw AespaError.device(reason: .invalid) @@ -44,40 +50,63 @@ class AespaCoreSession: AVCaptureSession { if tuner.needLock { try device.lockForConfiguration() } defer { if tuner.needLock { device.unlockForConfiguration() } + onComplete(.success(())) } try tuner.tune(device) } catch let error { - errorHandler(error) + print(tuner) + Logger.log(error: error) + onComplete(.failure(error)) } } } - func run(_ tuner: T, _ errorHandler: @escaping ErrorHandler) { - workQueue.async { + func run(_ tuner: T, _ onComplete: @escaping CompletionHandler) { + workQueue.addOperation { do { guard let connection = self.connections.first else { throw AespaError.session(reason: .cannotFindConnection) } try tuner.tune(connection) + onComplete(.success(())) } catch let error { - errorHandler(error) + print(tuner) + Logger.log(error: error) + onComplete(.failure(error)) } } } - func run(_ processor: T, _ errorHandler: @escaping ErrorHandler) { - workQueue.async { + func run(_ processor: T, _ onComplete: @escaping CompletionHandler) { + workQueue.addOperation { do { guard let output = self.movieFileOutput else { throw AespaError.session(reason: .cannotFindConnection) } try processor.process(output) + onComplete(.success(())) } catch let error { - 
errorHandler(error) + print(processor) + Logger.log(error: error) + onComplete(.failure(error)) } } } + + func start() throws { + let session = self + + guard session.isRunning == false else { return } + + try session.addMovieInput() + try session.addMovieFileOutput() + try session.addCapturePhotoOutput() + session.startRunning() + + self.workQueue.isSuspended = false + Logger.log(message: "Session is configured successfully") + } } diff --git a/Sources/Aespa/Core/Context/AespaVideoContext.swift b/Sources/Aespa/Core/Context/AespaVideoContext.swift index ba35afe..ab0a78c 100644 --- a/Sources/Aespa/Core/Context/AespaVideoContext.swift +++ b/Sources/Aespa/Core/Context/AespaVideoContext.swift @@ -75,7 +75,7 @@ extension AespaVideoContext: VideoContext { .eraseToAnyPublisher() } - public func startRecording(_ errorHandler: @escaping ErrorHandler = { _ in }) { + public func startRecording(_ onComplete: @escaping CompletionHandler = { _ in }) { do { let fileName = option.asset.fileNameHandler() let filePath = try FilePathProvider.requestFilePath( @@ -86,17 +86,17 @@ extension AespaVideoContext: VideoContext { extension: "mp4") if option.session.autoVideoOrientationEnabled { - commonContext.orientation(to: UIDevice.current.orientation.toVideoOrientation, errorHandler) + commonContext.orientation(to: UIDevice.current.orientation.toVideoOrientation, onComplete) } - recorder.startRecording(in: filePath, errorHandler) + recorder.startRecording(in: filePath, onComplete) isRecording = true } catch let error { - errorHandler(error) + onComplete(.failure(error)) } } - public func stopRecording(_ completionHandler: @escaping (Result) -> Void = { _ in }) { + public func stopRecording(_ onCompelte: @escaping ResultHandler = { _ in }) { Task(priority: .utility) { do { let videoFilePath = try await recorder.stopRecording() @@ -106,48 +106,48 @@ extension AespaVideoContext: VideoContext { videoFileBufferSubject.send(.success(videoFile)) isRecording = false - 
completionHandler(.success(videoFile)) + onCompelte(.success(videoFile)) } catch let error { Logger.log(error: error) - completionHandler(.failure(error)) + onCompelte(.failure(error)) } } } @discardableResult - public func mute(_ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoContext { + public func mute(_ onComplete: @escaping CompletionHandler = { _ in }) -> AespaVideoContext { let tuner = AudioTuner(isMuted: true) - coreSession.run(tuner, errorHandler) + coreSession.run(tuner, onComplete) return self } @discardableResult - public func unmute(_ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoContext { + public func unmute(_ onComplete: @escaping CompletionHandler = { _ in }) -> AespaVideoContext { let tuner = AudioTuner(isMuted: false) - coreSession.run(tuner, errorHandler) + coreSession.run(tuner, onComplete) return self } @discardableResult - public func stabilization(mode: AVCaptureVideoStabilizationMode, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoContext { + public func stabilization(mode: AVCaptureVideoStabilizationMode, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaVideoContext { let tuner = VideoStabilizationTuner(stabilzationMode: mode) - coreSession.run(tuner, errorHandler) + coreSession.run(tuner, onComplete) return self } @discardableResult - public func torch(mode: AVCaptureDevice.TorchMode, level: Float, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoContext { + public func torch(mode: AVCaptureDevice.TorchMode, level: Float, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaVideoContext { let tuner = TorchTuner(level: level, torchMode: mode) - coreSession.run(tuner, errorHandler) + coreSession.run(tuner, onComplete) return self } - public func customize(_ tuner: T, _ errorHandler: @escaping ErrorHandler = { _ in }) -> AespaVideoContext { - coreSession.run(tuner, errorHandler) + public func customize(_ tuner: T, _ onComplete: @escaping CompletionHandler = 
{ _ in }) -> AespaVideoContext { + coreSession.run(tuner, onComplete) return self } diff --git a/Sources/Aespa/Core/Context/Context.swift b/Sources/Aespa/Core/Context/Context.swift index d20bd91..519b411 100644 --- a/Sources/Aespa/Core/Context/Context.swift +++ b/Sources/Aespa/Core/Context/Context.swift @@ -11,8 +11,8 @@ import Foundation import AVFoundation /// -public typealias ErrorHandler = (Error) -> Void - +public typealias CompletionHandler = (Result) -> Void +public typealias ResultHandler = (Result) -> Void /// A protocol that defines the common behaviors and properties that all context types must implement. /// @@ -27,10 +27,10 @@ public protocol CommonContext { /// /// - Parameters: /// - preset: An `AVCaptureSession.Preset` value indicating the quality preset to be set. - /// - errorHandler: A closure to be executed if the session fails to run the tuner. + /// - onComplete: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func quality(to preset: AVCaptureSession.Preset, _ errorHandler: @escaping ErrorHandler) -> CommonContextType + @discardableResult func quality(to preset: AVCaptureSession.Preset, _ onComplete: @escaping CompletionHandler) -> CommonContextType /// Sets the camera position for the video recording session. /// @@ -38,59 +38,59 @@ public protocol CommonContext { /// /// - Parameters: /// - position: An `AVCaptureDevice.Position` value indicating the camera position to be set. - /// - errorHandler: A closure to be executed if the session fails to run the tuner. + /// - onComplete: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. 
- @discardableResult func position(to position: AVCaptureDevice.Position, _ errorHandler: @escaping ErrorHandler) -> CommonContextType + @discardableResult func position(to position: AVCaptureDevice.Position, _ onComplete: @escaping CompletionHandler) -> CommonContextType /// Sets the orientation for the session. /// /// - Parameters: /// - orientation: An `AVCaptureVideoOrientation` value indicating the orientation to be set. - /// - errorHandler: A closure to be executed if the session fails to run the tuner. + /// - onComplete: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. /// /// - Note: It sets the orientation of the video you are recording, not the orientation of the `AVCaptureVideoPreviewLayer`. - @discardableResult func orientation(to orientation: AVCaptureVideoOrientation, _ errorHandler: @escaping ErrorHandler) -> CommonContextType + @discardableResult func orientation(to orientation: AVCaptureVideoOrientation, _ onComplete: @escaping CompletionHandler) -> CommonContextType /// Sets the autofocusing mode for the video recording session. /// /// - Parameters: /// - mode: The focus mode(`AVCaptureDevice.FocusMode`) for the session. /// - point: The point in the camera's field of view that the auto focus should prioritize. - /// - errorHandler: A closure to be executed if the session fails to run the tuner. + /// - onComplete: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func focus(mode: AVCaptureDevice.FocusMode, point: CGPoint?, _ errorHandler: @escaping ErrorHandler) -> CommonContextType + @discardableResult func focus(mode: AVCaptureDevice.FocusMode, point: CGPoint?, _ onComplete: @escaping CompletionHandler) -> CommonContextType /// Sets the zoom factor for the video recording session. /// /// - Parameters: /// - factor: A `CGFloat` value indicating the zoom factor to be set. 
- /// - errorHandler: A closure to be executed if the session fails to run the tuner. + /// - onComplete: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func zoom(factor: CGFloat, _ errorHandler: @escaping ErrorHandler) -> CommonContextType + @discardableResult func zoom(factor: CGFloat, _ onComplete: @escaping CompletionHandler) -> CommonContextType /// Changes monitoring status. /// /// - Parameters: /// - enabled: A boolean value to set monitoring status. - /// - errorHandler: A closure to be executed if the session fails to run the tuner. + /// - onComplete: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func changeMonitoring(enabled: Bool, _ errorHandler: @escaping ErrorHandler) -> CommonContextType + @discardableResult func changeMonitoring(enabled: Bool, _ onComplete: @escaping CompletionHandler) -> CommonContextType /// This function provides a way to use a custom tuner to modify the current session. /// The tuner must conform to `AespaSessionTuning`. /// /// - Parameters: /// - tuner: An instance that conforms to `AespaSessionTuning`. - /// - errorHandler: A closure to be executed if the session fails to run the tuner. + /// - onComplete: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func custom(_ tuner: T, _ errorHandler: @escaping ErrorHandler) -> CommonContextType + @discardableResult func custom(_ tuner: T, _ onComplete: @escaping CompletionHandler) -> CommonContextType } /// A protocol that defines the behaviors and properties specific to the video context. @@ -120,39 +120,39 @@ public protocol VideoContext { /// Starts the video recording session. /// - /// - Parameter errorHandler: A closure to handle any errors that occur during recording. 
+ /// - Parameter onComplete: A closure to handle any errors that occur during recording. /// /// - Note: If `autoVideoOrientation` option is enabled, /// it sets the orientation according to the current device orientation. - func startRecording(_ errorHandler: @escaping ErrorHandler) + func startRecording(_ onComplete: @escaping CompletionHandler) func stopRecording(_ completionHandler: @escaping (Result) -> Void) /// Mutes the audio input for the video recording session. /// - /// - Parameter errorHandler: A closure to handle any errors that occur when muting the audio. + /// - Parameter onComplete: A closure to handle any errors that occur when muting the audio. /// /// - Returns: The modified `VideoContextType` for chaining calls. @discardableResult - func mute(_ errorHandler: @escaping ErrorHandler) -> VideoContextType + func mute(_ onComplete: @escaping CompletionHandler) -> VideoContextType /// Unmutes the audio input for the video recording session. /// - /// - Parameter errorHandler: A closure to handle any errors that occur when unmuting the audio. + /// - Parameter onComplete: A closure to handle any errors that occur when unmuting the audio. /// /// - Returns: The modified `VideoContextType` for chaining calls. @discardableResult - func unmute(_ errorHandler: @escaping ErrorHandler) -> VideoContextType + func unmute(_ onComplete: @escaping CompletionHandler) -> VideoContextType /// Sets the stabilization mode for the video recording session. /// /// - Parameters: /// - mode: An `AVCaptureVideoStabilizationMode` value indicating the stabilization mode to be set. - /// - errorHandler: A closure to handle any errors that occur when setting the stabilization mode. + /// - onComplete: A closure to handle any errors that occur when setting the stabilization mode. /// /// - Returns: The modified `VideoContextType` for chaining calls. 
@discardableResult - func stabilization(mode: AVCaptureVideoStabilizationMode, _ errorHandler: @escaping ErrorHandler) -> VideoContextType + func stabilization(mode: AVCaptureVideoStabilizationMode, _ onComplete: @escaping CompletionHandler) -> VideoContextType /// Sets the torch mode and level for the video recording session. /// @@ -165,7 +165,7 @@ public protocol VideoContext { /// - Note: This function might throw an error if the torch mode is not supported, /// or the specified level is not within the acceptable range. @discardableResult - func torch(mode: AVCaptureDevice.TorchMode, level: Float, _ errorHandler: @escaping ErrorHandler) -> VideoContextType + func torch(mode: AVCaptureDevice.TorchMode, level: Float, _ onComplete: @escaping CompletionHandler) -> VideoContextType /// Fetches a list of recorded video files. /// The number of files fetched is controlled by the limit parameter. diff --git a/Sources/Aespa/Tuner/Session/SessionConfigurationTuner.swift b/Sources/Aespa/Tuner/Session/SessionConfigurationTuner.swift deleted file mode 100644 index 3536b17..0000000 --- a/Sources/Aespa/Tuner/Session/SessionConfigurationTuner.swift +++ /dev/null @@ -1,23 +0,0 @@ -// -// SessionLauncher.swift -// -// -// Created by 이영빈 on 2023/06/02. 
-// - -import AVFoundation - -struct SessionConfigurationTuner: AespaSessionTuning { - let needTransaction = false - - func tune(_ session: T) throws { - guard session.isRunning == false else { return } - - try session.addMovieInput() - try session.addMovieFileOutput() - try session.addCapturePhotoOutput() - session.startRunning() - - Logger.log(message: "Session is configured successfully") - } -} diff --git a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift index 76a5883..fe061fa 100644 --- a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift +++ b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift @@ -135,7 +135,7 @@ private extension InteractivePreview { return } -// session.focus(mode: currentFocusMode, point: value.location) + session.focus(mode: currentFocusMode, point: value.location) focusingLocation = value.location if enableShowingCrosshair { @@ -166,7 +166,7 @@ private extension InteractivePreview { func resetFocusMode(to focusMode: AVCaptureDevice.FocusMode) { guard session.isRunning else { return } -// session.focus(mode: focusMode) + session.focus(mode: focusMode) } func showCrosshair() { diff --git a/Sources/Aespa/Util/Log/Logger.swift b/Sources/Aespa/Util/Log/Logger.swift index 1321fc3..fd3b6ff 100644 --- a/Sources/Aespa/Util/Log/Logger.swift +++ b/Sources/Aespa/Util/Log/Logger.swift @@ -18,11 +18,10 @@ class Logger { static func log( error: Error, - file: String = (#file as NSString).lastPathComponent, method: String = #function ) { if enableLogging { - print("[Aespa : error] [\(file) : \(method)] - \(error) : \(error.localizedDescription)") + print("[Aespa : error] [\(method)] - \(error) : \(error.localizedDescription)") } } } From c10b84900d48a9dcc11dc128283a99a396b49ae5 Mon Sep 17 00:00:00 2001 From: enebin Date: Fri, 30 Jun 2023 13:10:45 +0900 Subject: [PATCH 19/21] Update SwiftUI codes --- Demo/Aespa-iOS/VideoContentView.swift | 2 - Demo/Aespa-iOS/VideoContentViewModel.swift | 9 
+- Sources/Aespa/AespaError.swift | 6 +- Sources/Aespa/AespaSession.swift | 5 - Sources/Aespa/Core/AespaCoreSession.swift | 12 +- .../AVCaptureDevice+AespaRepresentable.swift | 21 +- Sources/Aespa/Tuner/Device/FocusTuner.swift | 14 +- Sources/Aespa/Tuner/Device/TorchTuner.swift | 2 +- .../Util/Extension/SwiftUI+Extension.swift | 213 +----------------- Sources/Aespa/Util/Log/Logger.swift | 5 +- Sources/Aespa/View/InteractivePreview.swift | 198 ++++++++++++++++ Sources/Aespa/View/Preview.swift | 43 ++++ 12 files changed, 282 insertions(+), 248 deletions(-) create mode 100644 Sources/Aespa/View/InteractivePreview.swift create mode 100644 Sources/Aespa/View/Preview.swift diff --git a/Demo/Aespa-iOS/VideoContentView.swift b/Demo/Aespa-iOS/VideoContentView.swift index 5ecc24c..f370d12 100644 --- a/Demo/Aespa-iOS/VideoContentView.swift +++ b/Demo/Aespa-iOS/VideoContentView.swift @@ -22,8 +22,6 @@ struct VideoContentView: View { var body: some View { ZStack { viewModel.preview - .crosshair(enabled: false) - .pinchZoom(enabled: true) .frame(minWidth: 0, maxWidth: .infinity, minHeight: 0, diff --git a/Demo/Aespa-iOS/VideoContentViewModel.swift b/Demo/Aespa-iOS/VideoContentViewModel.swift index 96f61bb..103d357 100644 --- a/Demo/Aespa-iOS/VideoContentViewModel.swift +++ b/Demo/Aespa-iOS/VideoContentViewModel.swift @@ -15,7 +15,8 @@ class VideoContentViewModel: ObservableObject { let aespaSession: AespaSession var preview: InteractivePreview { - aespaSession.interactivePreview() + let option = InteractivePreviewOption(enableShowingCrosshair: false) + return aespaSession.interactivePreview(option: option) } private var subscription = Set() @@ -36,7 +37,11 @@ class VideoContentViewModel: ObservableObject { .changeMonitoring(enabled: true) .orientation(to: .portrait) .quality(to: .high) - .custom(WideColorCameraTuner()) + .custom(WideColorCameraTuner()) { result in + if case .failure(let error) = result { + print("Error: ", error) + } + } // Photo-only setting aespaSession diff 
--git a/Sources/Aespa/AespaError.swift b/Sources/Aespa/AespaError.swift index cc3b0be..bc4670b 100644 --- a/Sources/Aespa/AespaError.swift +++ b/Sources/Aespa/AespaError.swift @@ -53,8 +53,10 @@ public extension AespaError { "Output is already exists" case unableToSetOutput = "Unable to set output." - case unsupported = - "Unsupported device (supported on iPhone XR and later devices)" + case notSupported = + "Unsupported functionality." + case busy = + "Device is busy now." } enum PermissionErrorReason: String { diff --git a/Sources/Aespa/AespaSession.swift b/Sources/Aespa/AespaSession.swift index 6dd83e9..139b5c0 100644 --- a/Sources/Aespa/AespaSession.swift +++ b/Sources/Aespa/AespaSession.swift @@ -157,11 +157,6 @@ open class AespaSession { // MARK: - Utilities public func getSubjectAreaDidChangePublisher() -> AnyPublisher { - if isSubjectAreaChangeMonitoringEnabled != true { - Logger.log( - message: "`isSubjectAreaChangeMonitoringEnabled` is not set `true`. `AVCaptureDeviceSubjectAreaDidChange` publisher may not publish anything.") - } - return NotificationCenter.default .publisher(for: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange) .eraseToAnyPublisher() diff --git a/Sources/Aespa/Core/AespaCoreSession.swift b/Sources/Aespa/Core/AespaCoreSession.swift index f879bae..55d5b90 100644 --- a/Sources/Aespa/Core/AespaCoreSession.swift +++ b/Sources/Aespa/Core/AespaCoreSession.swift @@ -33,8 +33,7 @@ class AespaCoreSession: AVCaptureSession { try tuner.tune(self) } catch let error { - print(tuner) - Logger.log(error: error) + Logger.log(error: error, message: "in \(tuner)") onComplete(.failure(error)) } } @@ -55,8 +54,7 @@ class AespaCoreSession: AVCaptureSession { try tuner.tune(device) } catch let error { - print(tuner) - Logger.log(error: error) + Logger.log(error: error, message: "in \(tuner)") onComplete(.failure(error)) } } @@ -72,8 +70,7 @@ class AespaCoreSession: AVCaptureSession { try tuner.tune(connection) onComplete(.success(())) } catch let error 
{ - print(tuner) - Logger.log(error: error) + Logger.log(error: error, message: "in \(tuner)") onComplete(.failure(error)) } } @@ -89,8 +86,7 @@ class AespaCoreSession: AVCaptureSession { try processor.process(output) onComplete(.success(())) } catch let error { - print(processor) - Logger.log(error: error) + Logger.log(error: error, message: "in \(processor)") onComplete(.failure(error)) } } diff --git a/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift b/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift index 55bfda4..39f400d 100644 --- a/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift +++ b/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift @@ -20,7 +20,7 @@ protocol AespaCaptureDeviceRepresentable: NSObject { func isFocusModeSupported(_ focusMode: AVCaptureDevice.FocusMode) -> Bool func zoomFactor(_ factor: CGFloat) - func focusMode(_ focusMode: AVCaptureDevice.FocusMode, point: CGPoint?) + func setFocusMode(_ focusMode: AVCaptureDevice.FocusMode, point: CGPoint?) throws func torchMode(_ torchMode: AVCaptureDevice.TorchMode) func setTorchModeOn(level torchLevel: Float) throws } @@ -39,10 +39,21 @@ extension AVCaptureDevice: AespaCaptureDeviceRepresentable { } } - func focusMode(_ focusMode: AVCaptureDevice.FocusMode, point: CGPoint?) { - self.focusMode = focusMode - if let point { - self.focusPointOfInterest = point + func setFocusMode(_ focusMode: AVCaptureDevice.FocusMode, point: CGPoint?) 
throws { + if isAdjustingFocus { + throw AespaError.device(reason: .busy) + } + + if isFocusModeSupported(focusMode) { + self.focusMode = focusMode + } else { + throw AespaError.device(reason: .notSupported) + } + + if isFocusPointOfInterestSupported { + if let point { self.focusPointOfInterest = point } + } else { + throw AespaError.device(reason: .notSupported) } } diff --git a/Sources/Aespa/Tuner/Device/FocusTuner.swift b/Sources/Aespa/Tuner/Device/FocusTuner.swift index 084a4c9..67677f4 100644 --- a/Sources/Aespa/Tuner/Device/FocusTuner.swift +++ b/Sources/Aespa/Tuner/Device/FocusTuner.swift @@ -13,21 +13,13 @@ struct FocusTuner: AespaDeviceTuning { let needLock = true let mode: AVCaptureDevice.FocusMode - let point: CGPoint? // Should be passed as original CGPoint, not mapped + let point: CGPoint? func tune(_ device: T) throws { guard device.isFocusModeSupported(mode) else { - throw AespaError.device(reason: .unsupported) - } - - var parsedPoint = point - if let point { - parsedPoint = CGPoint( - x: point.x / UIScreen.main.bounds.width, - y: point.y / UIScreen.main.bounds.height - ) + throw AespaError.device(reason: .notSupported) } - device.focusMode(mode, point: parsedPoint) + try device.setFocusMode(mode, point: point) } } diff --git a/Sources/Aespa/Tuner/Device/TorchTuner.swift b/Sources/Aespa/Tuner/Device/TorchTuner.swift index 18b9001..4df7668 100644 --- a/Sources/Aespa/Tuner/Device/TorchTuner.swift +++ b/Sources/Aespa/Tuner/Device/TorchTuner.swift @@ -14,7 +14,7 @@ struct TorchTuner: AespaDeviceTuning { func tune(_ device: T) throws where T: AespaCaptureDeviceRepresentable { guard device.hasTorch else { - throw AespaError.device(reason: .unsupported) + throw AespaError.device(reason: .notSupported) } device.torchMode(torchMode) diff --git a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift index fe061fa..366e878 100644 --- a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift +++ 
b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift @@ -34,218 +34,9 @@ public extension AespaSession { /// Make sure you're using this mode for the feature to work. func interactivePreview( gravity: AVLayerVideoGravity = .resizeAspectFill, - prefereedFocusMode focusMode: AVCaptureDevice.FocusMode = .continuousAutoFocus + option: InteractivePreviewOption = .init() ) -> InteractivePreview { let internalPreview = Preview(of: self, gravity: gravity) - return InteractivePreview(internalPreview, preferredFocusMode: focusMode) - } -} - -public struct InteractivePreview: View { - private let preview: Preview - - // Position - @State private var enableChangePosition = true - - // Zoom - @State private var enableZoom = true - @State private var previousZoomFactor: CGFloat = 1.0 - @State private var currentZoomFactor: CGFloat = 1.0 - - // Foocus - @State private var preferredFocusMode: AVCaptureDevice.FocusMode = .continuousAutoFocus - @State private var enableFocus = true - @State private var focusingLocation = CGPoint.zero - // Crosshair - @State private var enableShowingCrosshair = true - @State private var focusFrameOpacity: Double = 0 - @State private var showingCrosshairTask: Task? - - var subjectAreaChangeMonitoringSubscription: Cancellable? - - init(_ preview: Preview, preferredFocusMode focusMode: AVCaptureDevice.FocusMode) { - self.preview = preview - self.preferredFocusMode = focusMode - self.subjectAreaChangeMonitoringSubscription = preview - .session - .getSubjectAreaDidChangePublisher() - .sink(receiveValue: { [self] _ in - self.resetFocusMode(to: focusMode) - }) - } - - var session: AespaSession { - preview.session - } - - var layer: AVCaptureVideoPreviewLayer { - preview.previewLayer - } - - var currentFocusMode: AVCaptureDevice.FocusMode? { - session.currentFocusMode - } - - var currentCameraPosition: AVCaptureDevice.Position? 
{ - session.currentCameraPosition - } - - public var body: some View { - ZStack { - preview - .gesture(changePositionGesture) - .gesture(tapToFocusGesture) - .gesture(pinchZoomGesture) - - // Crosshair - Rectangle() - .stroke(lineWidth: 1) - .foregroundColor(Color.yellow) - .frame(width: 100, height: 100) - .position(focusingLocation) - .opacity(focusFrameOpacity) - .animation(.spring(), value: focusFrameOpacity) - } - } -} - -private extension InteractivePreview { - var changePositionGesture: some Gesture { - guard session.isRunning, enableChangePosition else { - return TapGesture(count: 2).onEnded{} - } - - return TapGesture(count: 2).onEnded { - let nextPosition: AVCaptureDevice.Position = (currentCameraPosition == .back) ? .front : .back - session.position(to: nextPosition) - } - } - - var tapToFocusGesture: some Gesture { - guard session.isRunning, enableFocus else { - return DragGesture(minimumDistance: 0).onEnded{ _ in } - } - - return DragGesture(minimumDistance: 0) - .onEnded { value in - guard - let currentFocusMode, - currentFocusMode == .autoFocus || currentFocusMode == .continuousAutoFocus - else { - return - } - - session.focus(mode: currentFocusMode, point: value.location) - focusingLocation = value.location - - if enableShowingCrosshair { - showCrosshair() - } - } - } - - var pinchZoomGesture: some Gesture { - guard session.isRunning, enableZoom else { - return MagnificationGesture().onChanged { _ in } .onEnded { _ in } - } - - let maxZoomFactor = session.maxZoomFactor ?? 1.0 - return MagnificationGesture() - .onChanged { (scale) in - let videoZoomFactor = scale * previousZoomFactor - if (videoZoomFactor <= maxZoomFactor) { - let newZoomFactor = max(1.0, min(videoZoomFactor, maxZoomFactor)) - session.zoom(factor: newZoomFactor) - } - } - .onEnded { (scale) in - let videoZoomFactor = scale * previousZoomFactor - previousZoomFactor = videoZoomFactor >= 1 ? 
videoZoomFactor : 1 - } - } - - func resetFocusMode(to focusMode: AVCaptureDevice.FocusMode) { - guard session.isRunning else { return } - session.focus(mode: focusMode) - } - - func showCrosshair() { - guard enableShowingCrosshair else { return } - - // Cancel the previous task - showingCrosshairTask?.cancel() - // Running a new task - showingCrosshairTask = Task { - // 10^9 nano seconds = 1 second - let second: UInt64 = 1_000_000_000 - - withAnimation { focusFrameOpacity = 1 } - - try await Task.sleep(nanoseconds: 2 * second) - withAnimation { focusFrameOpacity = 0.35 } - - try await Task.sleep(nanoseconds: 3 * second) - withAnimation { focusFrameOpacity = 0 } - } - } -} - -public extension InteractivePreview { - func crosshair(enabled: Bool) -> Self { - enableShowingCrosshair = enabled - return self - } - - func tapToFocus(enabled: Bool) -> Self { - enableFocus = enabled - return self - } - - func preferredFocusMode(_ mode: AVCaptureDevice.FocusMode) -> Self { - preferredFocusMode = mode - return self - } - - func doubleTapToChangeCameraPosition(enabled: Bool) -> Self { - enableChangePosition = enabled - return self - } - - func pinchZoom(enabled: Bool) -> Self { - enableZoom = enabled - return self - } -} - -struct Preview: UIViewControllerRepresentable { - let session: AespaSession - let gravity: AVLayerVideoGravity - let previewLayer: AVCaptureVideoPreviewLayer - - init( - of session: AespaSession, - gravity: AVLayerVideoGravity - ) { - self.gravity = gravity - self.session = session - self.previewLayer = session.previewLayer - } - - func makeUIViewController(context: Context) -> UIViewController { - let viewController = UIViewController() - viewController.view.backgroundColor = .clear - - return viewController - } - - func updateUIViewController(_ uiViewController: UIViewController, context: Context) { - previewLayer.videoGravity = gravity - uiViewController.view.layer.addSublayer(previewLayer) - - previewLayer.frame = uiViewController.view.bounds - } - - 
func dismantleUIViewController(_ uiViewController: UIViewController, coordinator: ()) { - previewLayer.removeFromSuperlayer() + return InteractivePreview(internalPreview) } } diff --git a/Sources/Aespa/Util/Log/Logger.swift b/Sources/Aespa/Util/Log/Logger.swift index fd3b6ff..f925865 100644 --- a/Sources/Aespa/Util/Log/Logger.swift +++ b/Sources/Aespa/Util/Log/Logger.swift @@ -18,10 +18,13 @@ class Logger { static func log( error: Error, + message: String = "", method: String = #function ) { if enableLogging { - print("[Aespa : error] [\(method)] - \(error) : \(error.localizedDescription)") + let timestamp = Date().description + + print("[⚠️ Aespa Error] \(timestamp) | Method: \(method) | Error: \(error) | Description: \(error.localizedDescription) | Message: \(message)") } } } diff --git a/Sources/Aespa/View/InteractivePreview.swift b/Sources/Aespa/View/InteractivePreview.swift new file mode 100644 index 0000000..62328d3 --- /dev/null +++ b/Sources/Aespa/View/InteractivePreview.swift @@ -0,0 +1,198 @@ +// +// InteractivePreview.swift +// +// +// Created by Young Bin on 2023/06/30. 
+// + +import Combine +import SwiftUI +import AVFoundation + +public struct InteractivePreviewOption { + // Position + public var enableChangePosition = true + // Zoom + public var enableZoom = true + // Foocus + public var enableFocus = true + public var enableChangeFocusModeWhenMoved = true + // Crosshair + public var enableShowingCrosshair = true + + public init( + enableChangePosition: Bool = true, + enableZoom: Bool = true, + enableFocus: Bool = true, + enableChangeFocusModeWhenMoved: Bool = true, + enableShowingCrosshair: Bool = true + ) { + self.enableChangePosition = enableChangePosition + self.enableZoom = enableZoom + self.enableFocus = enableFocus + self.enableChangeFocusModeWhenMoved = enableChangeFocusModeWhenMoved + self.enableShowingCrosshair = enableShowingCrosshair + } +} + +public struct InteractivePreview: View { + private let option: InteractivePreviewOption + private let preview: Preview + + // Zoom + @State private var previousZoomFactor: CGFloat = 1.0 + @State private var currentZoomFactor: CGFloat = 1.0 + + // Foocus + @State private var preferredFocusMode: AVCaptureDevice.FocusMode = .continuousAutoFocus + @State private var focusingLocation = CGPoint.zero + + // Crosshair + @State private var focusFrameOpacity: Double = 0 + @State private var showingCrosshairTask: Task? + + private var subjectAreaChangeMonitoringSubscription: Cancellable? + + init(_ preview: Preview, option: InteractivePreviewOption = .init()) { + self.preview = preview + self.option = option + self.preferredFocusMode = preview.session.currentFocusMode ?? 
.continuousAutoFocus + + self.subjectAreaChangeMonitoringSubscription = preview + .session + .getSubjectAreaDidChangePublisher() + .sink(receiveValue: { [self] _ in + if option.enableChangeFocusModeWhenMoved { + self.resetFocusMode() + } + }) + + } + + var session: AespaSession { + preview.session + } + + var layer: AVCaptureVideoPreviewLayer { + preview.previewLayer + } + + var currentFocusMode: AVCaptureDevice.FocusMode? { + session.currentFocusMode + } + + var currentCameraPosition: AVCaptureDevice.Position? { + session.currentCameraPosition + } + + public var body: some View { + GeometryReader { geometry in + ZStack { + preview + .gesture(changePositionGesture) + .gesture(tapToFocusGesture(geometry)) + .gesture(pinchZoomGesture) + + // Crosshair + Rectangle() + .stroke(lineWidth: 1) + .foregroundColor(Color.yellow) + .frame(width: 100, height: 100) + .position(focusingLocation) + .opacity(focusFrameOpacity) + .animation(.spring(), value: focusFrameOpacity) + } + } + } +} + +private extension InteractivePreview { + var changePositionGesture: some Gesture { + guard session.isRunning, option.enableChangePosition else { + return TapGesture(count: 2).onEnded{} + } + + return TapGesture(count: 2).onEnded { + let nextPosition: AVCaptureDevice.Position = (currentCameraPosition == .back) ? 
.front : .back + session.position(to: nextPosition) + } + } + + func tapToFocusGesture(_ geometry: GeometryProxy) -> some Gesture { + guard session.isRunning, option.enableFocus else { + return DragGesture(minimumDistance: 0).onEnded{ _ in } + } + + return DragGesture(minimumDistance: 0) + .onEnded { value in + guard + let currentFocusMode, + currentFocusMode == .locked || currentFocusMode == .continuousAutoFocus + else { + return + } + + var point = value.location + point = CGPoint( + x: point.x / geometry.size.width, + y: point.y / geometry.size.height + ) + print(point) + + session.focus(mode: .autoFocus, point: point) { result in + print("Done") + } + focusingLocation = value.location + + if option.enableShowingCrosshair { + showCrosshair() + } + } + } + + var pinchZoomGesture: some Gesture { + guard session.isRunning, option.enableZoom else { + return MagnificationGesture().onChanged { _ in } .onEnded { _ in } + } + + let maxZoomFactor = session.maxZoomFactor ?? 1.0 + return MagnificationGesture() + .onChanged { (scale) in + let videoZoomFactor = scale * previousZoomFactor + if (videoZoomFactor <= maxZoomFactor) { + let newZoomFactor = max(1.0, min(videoZoomFactor, maxZoomFactor)) + session.zoom(factor: newZoomFactor) + } + } + .onEnded { (scale) in + let videoZoomFactor = scale * previousZoomFactor + previousZoomFactor = videoZoomFactor >= 1 ? 
videoZoomFactor : 1 + } + } + + func resetFocusMode() { + guard session.isRunning else { return } + session.focus(mode: preferredFocusMode) + } + + func showCrosshair() { + print(option.enableShowingCrosshair) + guard option.enableShowingCrosshair else { return } + + // Cancel the previous task + showingCrosshairTask?.cancel() + // Running a new task + showingCrosshairTask = Task { + // 10^9 nano seconds = 1 second + let second: UInt64 = 1_000_000_000 + + withAnimation { focusFrameOpacity = 1 } + + try await Task.sleep(nanoseconds: 2 * second) + withAnimation { focusFrameOpacity = 0.35 } + + try await Task.sleep(nanoseconds: 3 * second) + withAnimation { focusFrameOpacity = 0 } + } + } +} diff --git a/Sources/Aespa/View/Preview.swift b/Sources/Aespa/View/Preview.swift new file mode 100644 index 0000000..375d98f --- /dev/null +++ b/Sources/Aespa/View/Preview.swift @@ -0,0 +1,43 @@ +// +// File.swift +// +// +// Created by Young Bin on 2023/06/30. +// + +import SwiftUI +import Foundation +import AVFoundation + +struct Preview: UIViewControllerRepresentable { + let session: AespaSession + let gravity: AVLayerVideoGravity + let previewLayer: AVCaptureVideoPreviewLayer + + init( + of session: AespaSession, + gravity: AVLayerVideoGravity + ) { + self.gravity = gravity + self.session = session + self.previewLayer = session.previewLayer + } + + func makeUIViewController(context: Context) -> UIViewController { + let viewController = UIViewController() + viewController.view.backgroundColor = .clear + + return viewController + } + + func updateUIViewController(_ uiViewController: UIViewController, context: Context) { + previewLayer.videoGravity = gravity + uiViewController.view.layer.addSublayer(previewLayer) + + previewLayer.frame = uiViewController.view.bounds + } + + func dismantleUIViewController(_ uiViewController: UIViewController, coordinator: ()) { + previewLayer.removeFromSuperlayer() + } +} From 7438a97b9c3dc7af88d07b61473c5256444971c2 Mon Sep 17 00:00:00 2001 From: 
enebin Date: Thu, 29 Jun 2023 22:59:56 +0900 Subject: [PATCH 20/21] Update README Update README --- README.md | 51 ++++++++++++++++++--------------------------------- 1 file changed, 18 insertions(+), 33 deletions(-) diff --git a/README.md b/README.md index 58ab27b..19365e6 100644 --- a/README.md +++ b/README.md @@ -5,14 +5,13 @@
-### Add a camera in just 3 lines +### From camera to album. In just 2 lines.
``` Swift let aespaOption = AespaOption(albumName: "YOUR_ALBUM_NAME") let aespaSession = Aespa.session(with: aespaOption) -try await Aespa.configure() // Done! ``` @@ -47,7 +46,8 @@ try await Aespa.configure() ## Introduction Aespa is a robust and intuitive Swift package for video capturing, built with a focus on the ease of setting up and usage. -It is designed to be easy to use for both beginners and experienced developers. If you're new to video recording on iOS or if you're looking to simplify your existing recording setup, Aespa could be the perfect fit for your project. + +It is designed to be easy to use from beginners to intermediate developers. If you're new to video recording on iOS or if you're looking to simplify your existing camera setup, Aespa could be the perfect fit for your project. ### ✅ Super easy to use @@ -101,7 +101,7 @@ AS --> D["Fetching asset files"] ### ✅ No more delegate
- Combine & async support + Combine support ``` mermaid graph LR; @@ -115,9 +115,10 @@ graph LR;
-### ✅ Also... -- Seamless image and video capture within a single preview session. +### ✅ Also - Automated system permission management. +- Seamless image and video capture within a single preview session. +- Thread-safe. - Support SPM. @@ -127,7 +128,7 @@ graph LR; > > You can access our **official documentation** for the most comprehensive and up-to-date explanations in [here](https://enebin.github.io/Aespa/documentation/aespa/) -### Interactive Preview +### `InteractivePreview` One of our main feature, `InteractivePreview` provides a comprehensive and intuitive way for users to interact directly with the camera preview. | Features | Description | @@ -137,7 +138,7 @@ One of our main feature, `InteractivePreview` provides a comprehensive and intui | Pinch zoom | Allows zooming in or out on the preview by using a pinch gesture. | -### More manaul options +### More manual options | Common | Description | |----------------------------------|------------------------------------------------------------------------------------------------------------------| | ✨ `zoom` | Modifies the zoom factor. | @@ -201,14 +202,7 @@ import Aespa let aespaOption = AespaOption(albumName: "YOUR_ALBUM_NAME") let aespaSession = Aespa.session(with: aespaOption) - -Task(priority: .background) { - try await Aespa.configure() -} ``` -> **Warning** -> -> Please ensure to call `configure` within a background execution context. Neglecting to do so may lead to significantly reduced responsiveness in your application. ([reference](https://developer.apple.com/documentation/avfoundation/avcapturesession/1388185-startrunning)) ## Implementation Exapmles ### Configuration @@ -243,7 +237,6 @@ aespaSession.capturePhoto() ``` ## SwiftUI Integration - Aespa also provides a super-easy way to integrate video capture functionality into SwiftUI applications. AespaSession includes a helper method to create a SwiftUI `UIViewRepresentable` that provides a preview of the video capture. 
### Example usage @@ -269,28 +262,20 @@ struct VideoContentView: View { class VideoContentViewModel: ObservableObject { let aespaSession: AespaSession - var preview: some UIViewRepresentable { - aespaSession.preview() + var preview: some View { + aespaSession.interactivePreview() } init() { let option = AespaOption(albumName: "Aespa-Demo") self.aespaSession = Aespa.session(with: option) - - Task(priority: .background) { - do { - try await Aespa.configure() - aespaSession - .autofocusing(mode: .continuousAutoFocus) - .orientation(to: .portrait) - .quality(to: .high) - - // Other settings ... - - } catch let error { - print(error) - } - } + + aespaSession + .autofocusing(mode: .continuousAutoFocus) + .orientation(to: .portrait) + .quality(to: .high) + + // Other settings... } } ``` From ec26ed04e474ad805685e0eb6ab07a1842adb2ca Mon Sep 17 00:00:00 2001 From: enebin Date: Fri, 30 Jun 2023 13:46:46 +0900 Subject: [PATCH 21/21] Update tests --- Demo/Aespa-iOS/VideoContentViewModel.swift | 18 +++--- README.md | 23 ++++++- Sources/Aespa/AespaSession.swift | 62 +++++++++++++----- .../Core/Context/AespaVideoContext.swift | 20 ++++-- Sources/Aespa/Core/Context/Context.swift | 64 +++++++++++++++---- .../AVCaptureDevice+AespaRepresentable.swift | 7 +- .../Tuner/Device/ChangeMonitoringTuner.swift | 2 +- Sources/Aespa/Util/Log/Logger.swift | 9 ++- Sources/Aespa/View/InteractivePreview.swift | 26 +++++--- Tests/Tests/Tuner/DeviceTunerTests.swift | 15 +++-- Tests/Tests/Tuner/SessionTunerTests.swift | 58 +---------------- 11 files changed, 183 insertions(+), 121 deletions(-) diff --git a/Demo/Aespa-iOS/VideoContentViewModel.swift b/Demo/Aespa-iOS/VideoContentViewModel.swift index 103d357..7862117 100644 --- a/Demo/Aespa-iOS/VideoContentViewModel.swift +++ b/Demo/Aespa-iOS/VideoContentViewModel.swift @@ -14,9 +14,12 @@ import Aespa class VideoContentViewModel: ObservableObject { let aespaSession: AespaSession - var preview: InteractivePreview { - let option = 
InteractivePreviewOption(enableShowingCrosshair: false) - return aespaSession.interactivePreview(option: option) + var preview: some View { + return aespaSession.interactivePreview() + + // Or you can give some options +// let option = InteractivePreviewOption(enableShowingCrosshair: false) +// return aespaSession.interactivePreview(option: option) } private var subscription = Set() @@ -93,13 +96,8 @@ class VideoContentViewModel: ObservableObject { func fetchPhotoFiles() { // File fetching task can cause low reponsiveness when called from main thread - DispatchQueue.global().async { - let fetchedFiles = self.aespaSession.fetchPhotoFiles() - - DispatchQueue.main.async { - self.photoFiles = fetchedFiles - } - } + let fetchedFiles = self.aespaSession.fetchPhotoFiles() + self.photoFiles = fetchedFiles } } diff --git a/README.md b/README.md index 19365e6..3dd5fe3 100644 --- a/README.md +++ b/README.md @@ -133,8 +133,8 @@ One of our main feature, `InteractivePreview` provides a comprehensive and intui | Features | Description | |------------------------|------------------------------------------------------------------------------------------------------------------| -| Tap-to-focus | Adjusts the focus of the camera based on the tapped area on the screen. | -| Double tap camera change | Switches between the front and back camera upon double tapping. | +| Tap to focus | Adjusts the focus of the camera based on the tapped area on the screen. | +| Double tap to change camera | Switches between the front and back camera upon double tapping. | | Pinch zoom | Allows zooming in or out on the preview by using a pinch gesture. 
| @@ -235,9 +235,26 @@ aespaSession.stopRecording() // Capture photo aespaSession.capturePhoto() ``` +### Get result +``` Swift +aespaSession.stopRecording { result in + switch result { + case .success(let file): + // + case .failure(let error): + print(error) + } +} + +// or +aespaSession.fetchVideoFiles(limit: 1) + +// or you can use publisher +aespaSession.videoFilePublisher.sink { result in ... } +``` ## SwiftUI Integration -Aespa also provides a super-easy way to integrate video capture functionality into SwiftUI applications. AespaSession includes a helper method to create a SwiftUI `UIViewRepresentable` that provides a preview of the video capture. +Aespa also provides a super-easy way to integrate video capture functionality into SwiftUI applications. `AespaSession` includes a helper method to create a SwiftUI `UIViewRepresentable` that provides a preview of the video capture. ### Example usage diff --git a/Sources/Aespa/AespaSession.swift b/Sources/Aespa/AespaSession.swift index 139b5c0..e568c42 100644 --- a/Sources/Aespa/AespaSession.swift +++ b/Sources/Aespa/AespaSession.swift @@ -103,6 +103,7 @@ open class AespaSession { coreSession } + /// This property indicates whether the current session is active or not. public var isRunning: Bool { coreSession.isRunning } @@ -137,6 +138,10 @@ open class AespaSession { return device.position } + /// This property indicates whether the camera device is set to monitor changes in the subject area. + /// + /// Enabling subject area change monitoring allows the device to adjust focus and exposure settings automatically + /// when the subject within the specified area changes. public var isSubjectAreaChangeMonitoringEnabled: Bool? 
{ guard let device = coreSession.videoDeviceInput?.device else { return nil } return device.isSubjectAreaChangeMonitoringEnabled @@ -155,7 +160,12 @@ open class AespaSession { } // MARK: - Utilities - + /// Returns a publisher that emits a `Notification` when the subject area of the capture device changes. + /// + /// This is useful when you want to react to changes in the capture device's subject area, + /// such as when the user changes the zoom factor, or when the device changes its autofocus area. + /// + /// - Returns: An `AnyPublisher` instance that emits `Notification` values. public func getSubjectAreaDidChangePublisher() -> AnyPublisher { return NotificationCenter.default .publisher(for: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange) @@ -199,34 +209,46 @@ extension AespaSession: CommonContext { } @discardableResult - public func quality(to preset: AVCaptureSession.Preset, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaSession { + public func quality( + to preset: AVCaptureSession.Preset, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaSession { let tuner = QualityTuner(videoQuality: preset) coreSession.run(tuner, onComplete) return self } - + @discardableResult - public func position(to position: AVCaptureDevice.Position, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaSession { + public func position( + to position: AVCaptureDevice.Position, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaSession { let tuner = CameraPositionTuner(position: position, devicePreference: option.session.cameraDevicePreference) coreSession.run(tuner, onComplete) return self } - + @discardableResult - public func orientation(to orientation: AVCaptureVideoOrientation, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaSession { + public func orientation( + to orientation: AVCaptureVideoOrientation, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaSession { let tuner = 
VideoOrientationTuner(orientation: orientation) coreSession.run(tuner, onComplete) return self } - + @discardableResult - public func focus(mode: AVCaptureDevice.FocusMode, point: CGPoint? = nil, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaSession { + public func focus( + mode: AVCaptureDevice.FocusMode, point: CGPoint? = nil, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaSession { let tuner = FocusTuner(mode: mode, point: point) coreSession.run(tuner, onComplete) return self } - + @discardableResult public func zoom(factor: CGFloat, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaSession { let tuner = ZoomTuner(zoomFactor: factor) @@ -235,14 +257,17 @@ extension AespaSession: CommonContext { } @discardableResult - public func changeMonitoring(enabled: Bool, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaSession { + public func changeMonitoring(enabled: Bool, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaSession { let tuner = ChangeMonitoringTuner(isSubjectAreaChangeMonitoringEnabled: enabled) coreSession.run(tuner, onComplete) return self } @discardableResult - public func custom(_ tuner: T, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaSession { + public func custom( + _ tuner: T, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaSession { coreSession.run(tuner, onComplete) return self } @@ -286,15 +311,22 @@ extension AespaSession: VideoContext { } @discardableResult - public func stabilization(mode: AVCaptureVideoStabilizationMode, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaVideoSessionContext { + public func stabilization( + mode: AVCaptureVideoStabilizationMode, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaVideoSessionContext { videoContext.stabilization(mode: mode, onComplete) } - + @discardableResult - public func torch(mode: AVCaptureDevice.TorchMode, level: Float, _ onComplete: @escaping 
CompletionHandler = { _ in }) -> AespaVideoSessionContext { + public func torch( + mode: AVCaptureDevice.TorchMode, + level: Float, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaVideoSessionContext { videoContext.torch(mode: mode, level: level, onComplete) } - + public func fetchVideoFiles(limit: Int = 0) -> [VideoFile] { videoContext.fetchVideoFiles(limit: limit) } diff --git a/Sources/Aespa/Core/Context/AespaVideoContext.swift b/Sources/Aespa/Core/Context/AespaVideoContext.swift index ab0a78c..a705314 100644 --- a/Sources/Aespa/Core/Context/AespaVideoContext.swift +++ b/Sources/Aespa/Core/Context/AespaVideoContext.swift @@ -131,7 +131,10 @@ extension AespaVideoContext: VideoContext { } @discardableResult - public func stabilization(mode: AVCaptureVideoStabilizationMode, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaVideoContext { + public func stabilization( + mode: AVCaptureVideoStabilizationMode, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaVideoContext { let tuner = VideoStabilizationTuner(stabilzationMode: mode) coreSession.run(tuner, onComplete) @@ -139,19 +142,26 @@ extension AespaVideoContext: VideoContext { } @discardableResult - public func torch(mode: AVCaptureDevice.TorchMode, level: Float, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaVideoContext { + public func torch( + mode: AVCaptureDevice.TorchMode, + level: Float, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaVideoContext { let tuner = TorchTuner(level: level, torchMode: mode) coreSession.run(tuner, onComplete) return self } - - public func customize(_ tuner: T, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaVideoContext { + + public func customize( + _ tuner: T, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaVideoContext { coreSession.run(tuner, onComplete) return self } - + public func fetchVideoFiles(limit: Int = 0) -> [VideoFile] { return fileManager.fetchVideo( 
albumName: option.asset.albumName, diff --git a/Sources/Aespa/Core/Context/Context.swift b/Sources/Aespa/Core/Context/Context.swift index 519b411..ea76544 100644 --- a/Sources/Aespa/Core/Context/Context.swift +++ b/Sources/Aespa/Core/Context/Context.swift @@ -10,8 +10,11 @@ import Combine import Foundation import AVFoundation -/// +/// A type representing a closure that handles a completion event with potential errors. public typealias CompletionHandler = (Result) -> Void + +/// A type representing a closure that handles a result of an operation +/// that produces a value of type `T`, with potential errors. public typealias ResultHandler = (Result) -> Void /// A protocol that defines the common behaviors and properties that all context types must implement. @@ -19,8 +22,9 @@ public typealias ResultHandler = (Result) -> Void /// It includes methods to control the quality, position, orientation, and auto-focusing behavior /// of the session. It also includes the ability to adjust the zoom level of the session. public protocol CommonContext { + /// associatedtype CommonContextType: CommonContext & VideoContext & PhotoContext - + /// var underlyingCommonContext: CommonContextType { get } /// Sets the quality preset for the video recording session. @@ -30,7 +34,10 @@ public protocol CommonContext { /// - onComplete: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func quality(to preset: AVCaptureSession.Preset, _ onComplete: @escaping CompletionHandler) -> CommonContextType + @discardableResult func quality( + to preset: AVCaptureSession.Preset, + _ onComplete: @escaping CompletionHandler + ) -> CommonContextType /// Sets the camera position for the video recording session. /// @@ -41,7 +48,10 @@ public protocol CommonContext { /// - onComplete: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. 
- @discardableResult func position(to position: AVCaptureDevice.Position, _ onComplete: @escaping CompletionHandler) -> CommonContextType + @discardableResult func position( + to position: AVCaptureDevice.Position, + _ onComplete: @escaping CompletionHandler + ) -> CommonContextType /// Sets the orientation for the session. /// @@ -51,8 +61,12 @@ public protocol CommonContext { /// /// - Returns: `AespaVideoContext`, for chaining calls. /// - /// - Note: It sets the orientation of the video you are recording, not the orientation of the `AVCaptureVideoPreviewLayer`. - @discardableResult func orientation(to orientation: AVCaptureVideoOrientation, _ onComplete: @escaping CompletionHandler) -> CommonContextType + /// - Note: It sets the orientation of the video you are recording, + /// not the orientation of the `AVCaptureVideoPreviewLayer`. + @discardableResult func orientation( + to orientation: AVCaptureVideoOrientation, + _ onComplete: @escaping CompletionHandler + ) -> CommonContextType /// Sets the autofocusing mode for the video recording session. /// @@ -62,7 +76,11 @@ public protocol CommonContext { /// - onComplete: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func focus(mode: AVCaptureDevice.FocusMode, point: CGPoint?, _ onComplete: @escaping CompletionHandler) -> CommonContextType + @discardableResult func focus( + mode: AVCaptureDevice.FocusMode, + point: CGPoint?, + _ onComplete: @escaping CompletionHandler + ) -> CommonContextType /// Sets the zoom factor for the video recording session. /// @@ -80,7 +98,10 @@ public protocol CommonContext { /// - onComplete: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. 
- @discardableResult func changeMonitoring(enabled: Bool, _ onComplete: @escaping CompletionHandler) -> CommonContextType + @discardableResult func changeMonitoring( + enabled: Bool, + _ onComplete: @escaping CompletionHandler + ) -> CommonContextType /// This function provides a way to use a custom tuner to modify the current session. /// The tuner must conform to `AespaSessionTuning`. @@ -90,7 +111,10 @@ public protocol CommonContext { /// - onComplete: A closure to be executed if the session fails to run the tuner. /// /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func custom(_ tuner: T, _ onComplete: @escaping CompletionHandler) -> CommonContextType + @discardableResult func custom( + _ tuner: T, + _ onComplete: @escaping CompletionHandler + ) -> CommonContextType } /// A protocol that defines the behaviors and properties specific to the video context. @@ -99,7 +123,9 @@ public protocol CommonContext { /// the session is currently recording or muted, and controlling video recording, /// stabilization, torch mode, and fetching recorded video files. public protocol VideoContext { + /// associatedtype VideoContextType: VideoContext + /// var underlyingVideoContext: VideoContextType { get } /// A Boolean value that indicates whether the session is currently recording video. @@ -126,7 +152,14 @@ public protocol VideoContext { /// it sets the orientation according to the current device orientation. func startRecording(_ onComplete: @escaping CompletionHandler) - func stopRecording(_ completionHandler: @escaping (Result) -> Void) + /// Stops the current recording session and saves the video file. + /// + /// Once the recording session is successfully stopped and the video file is saved, + /// this function invokes a completion handler with the resulting `VideoFile` instance or an error. + /// + /// - Parameter onComplete: A closure to be called after the recording has stopped + /// and the video file is saved or failed. 
+ func stopRecording(_ onComplete: @escaping (Result) -> Void) /// Mutes the audio input for the video recording session. /// @@ -152,7 +185,10 @@ public protocol VideoContext { /// /// - Returns: The modified `VideoContextType` for chaining calls. @discardableResult - func stabilization(mode: AVCaptureVideoStabilizationMode, _ onComplete: @escaping CompletionHandler) -> VideoContextType + func stabilization( + mode: AVCaptureVideoStabilizationMode, + _ onComplete: @escaping CompletionHandler + ) -> VideoContextType /// Sets the torch mode and level for the video recording session. /// @@ -165,7 +201,11 @@ public protocol VideoContext { /// - Note: This function might throw an error if the torch mode is not supported, /// or the specified level is not within the acceptable range. @discardableResult - func torch(mode: AVCaptureDevice.TorchMode, level: Float, _ onComplete: @escaping CompletionHandler) -> VideoContextType + func torch( + mode: AVCaptureDevice.TorchMode, + level: Float, + _ onComplete: @escaping CompletionHandler + ) -> VideoContextType /// Fetches a list of recorded video files. /// The number of files fetched is controlled by the limit parameter. 
diff --git a/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift b/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift index 39f400d..cb7cccf 100644 --- a/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift +++ b/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift @@ -8,7 +8,7 @@ import Foundation import AVFoundation -protocol AespaCaptureDeviceRepresentable: NSObject { +protocol AespaCaptureDeviceRepresentable { var hasTorch: Bool { get } var focusMode: AVCaptureDevice.FocusMode { get set } var isSubjectAreaChangeMonitoringEnabled: Bool { get set } @@ -22,6 +22,7 @@ protocol AespaCaptureDeviceRepresentable: NSObject { func zoomFactor(_ factor: CGFloat) func setFocusMode(_ focusMode: AVCaptureDevice.FocusMode, point: CGPoint?) throws func torchMode(_ torchMode: AVCaptureDevice.TorchMode) + func enableMonitoring(_ enabled: Bool) func setTorchModeOn(level torchLevel: Float) throws } @@ -38,6 +39,10 @@ extension AVCaptureDevice: AespaCaptureDeviceRepresentable { self.torchMode = .off } } + + func enableMonitoring(_ enabled: Bool) { + self.isSubjectAreaChangeMonitoringEnabled = enabled + } func setFocusMode(_ focusMode: AVCaptureDevice.FocusMode, point: CGPoint?) 
throws { if isAdjustingFocus { diff --git a/Sources/Aespa/Tuner/Device/ChangeMonitoringTuner.swift b/Sources/Aespa/Tuner/Device/ChangeMonitoringTuner.swift index 74c8e28..6ecee84 100644 --- a/Sources/Aespa/Tuner/Device/ChangeMonitoringTuner.swift +++ b/Sources/Aespa/Tuner/Device/ChangeMonitoringTuner.swift @@ -18,6 +18,6 @@ struct ChangeMonitoringTuner: AespaDeviceTuning { } func tune(_ device: T) throws { - device.isSubjectAreaChangeMonitoringEnabled = enabled + device.enableMonitoring(enabled) } } diff --git a/Sources/Aespa/Util/Log/Logger.swift b/Sources/Aespa/Util/Log/Logger.swift index f925865..2251eff 100644 --- a/Sources/Aespa/Util/Log/Logger.swift +++ b/Sources/Aespa/Util/Log/Logger.swift @@ -23,8 +23,13 @@ class Logger { ) { if enableLogging { let timestamp = Date().description - - print("[⚠️ Aespa Error] \(timestamp) | Method: \(method) | Error: \(error) | Description: \(error.localizedDescription) | Message: \(message)") + print( + "[⚠️ Aespa Error] \(timestamp) |" + + " Method: \(method) |" + + " Error: \(error) |" + + " Description: \(error.localizedDescription) |" + + " Message: \(message)" + ) } } } diff --git a/Sources/Aespa/View/InteractivePreview.swift b/Sources/Aespa/View/InteractivePreview.swift index 62328d3..d1c8401 100644 --- a/Sources/Aespa/View/InteractivePreview.swift +++ b/Sources/Aespa/View/InteractivePreview.swift @@ -9,17 +9,27 @@ import Combine import SwiftUI import AVFoundation +/// Struct that contains the options for customizing an `InteractivePreview`. +/// +/// The options include enabling or disabling certain interactive features such as changing position, +/// zooming, focusing, adjusting focus mode when moved, and showing a crosshair. public struct InteractivePreviewOption { - // Position + /// Flag that controls whether the camera position can be changed. Default is `true`. public var enableChangePosition = true - // Zoom + + /// Flag that controls whether zoom functionality is enabled. Default is `true`. 
public var enableZoom = true - // Foocus + + /// Flag that controls whether focus can be manually adjusted. Default is `true`. public var enableFocus = true + + /// Flag that controls whether the focus mode is changed when the camera is moved. Default is `true`. public var enableChangeFocusModeWhenMoved = true - // Crosshair - public var enableShowingCrosshair = true + /// Flag that controls whether a crosshair is displayed on the preview. Default is `true`. + public var enableShowingCrosshair = true + + /// Initialize the option public init( enableChangePosition: Bool = true, enableZoom: Bool = true, @@ -109,7 +119,7 @@ public struct InteractivePreview: View { private extension InteractivePreview { var changePositionGesture: some Gesture { guard session.isRunning, option.enableChangePosition else { - return TapGesture(count: 2).onEnded{} + return TapGesture(count: 2).onEnded {} } return TapGesture(count: 2).onEnded { @@ -120,7 +130,7 @@ private extension InteractivePreview { func tapToFocusGesture(_ geometry: GeometryProxy) -> some Gesture { guard session.isRunning, option.enableFocus else { - return DragGesture(minimumDistance: 0).onEnded{ _ in } + return DragGesture(minimumDistance: 0).onEnded { _ in } } return DragGesture(minimumDistance: 0) @@ -139,7 +149,7 @@ private extension InteractivePreview { ) print(point) - session.focus(mode: .autoFocus, point: point) { result in + session.focus(mode: .autoFocus, point: point) { _ in print("Done") } focusingLocation = value.location diff --git a/Tests/Tests/Tuner/DeviceTunerTests.swift b/Tests/Tests/Tuner/DeviceTunerTests.swift index 4b717df..4fd5a8f 100644 --- a/Tests/Tests/Tuner/DeviceTunerTests.swift +++ b/Tests/Tests/Tuner/DeviceTunerTests.swift @@ -22,22 +22,23 @@ final class DeviceTunerTests: XCTestCase { override func tearDownWithError() throws { device = nil } - - func testAutoFocusTuner() throws { + + func testFocusTuner() throws { let mode = AVCaptureDevice.FocusMode.locked let point = CGPoint() - let tuner 
= AutoFocusTuner(mode: mode, point: point) + let tuner = FocusTuner(mode: mode, point: point) stub(device) { proxy in when(proxy.isFocusModeSupported(equal(to: mode))).thenReturn(true) - when(proxy.focusMode(equal(to: mode), point: equal(to: point))).then { mode in + when(proxy.setFocusMode(equal(to: mode), + point: equal(to: point))).then { mode in when(proxy.focusMode.get).thenReturn(.locked) } } try tuner.tune(device) verify(device) - .focusMode(equal(to: mode), point: equal(to: point)) + .setFocusMode(equal(to: mode), point: equal(to: point)) .with(returnType: Void.self) XCTAssertEqual(device.focusMode, mode) @@ -69,7 +70,7 @@ final class DeviceTunerTests: XCTestCase { stub(device) { proxy in when(proxy.hasTorch.get).thenReturn(true) when(proxy.torchMode(equal(to: mode))).thenDoNothing() - when(proxy.torchModeOn(level: level)).thenDoNothing() + when(proxy.setTorchModeOn(level: level)).thenDoNothing() } try tuner.tune(device) @@ -78,7 +79,7 @@ final class DeviceTunerTests: XCTestCase { .with(returnType: Void.self) verify(device) - .torchModeOn(level: level) + .setTorchModeOn(level: level) .with(returnType: Void.self) } } diff --git a/Tests/Tests/Tuner/SessionTunerTests.swift b/Tests/Tests/Tuner/SessionTunerTests.swift index c1eaac2..1b07311 100644 --- a/Tests/Tests/Tuner/SessionTunerTests.swift +++ b/Tests/Tests/Tuner/SessionTunerTests.swift @@ -16,7 +16,7 @@ final class SessionTunerTests: XCTestCase { var mockSessionProtocol: MockAespaCoreSessionRepresentable! 
- override func upWithError() throws { + override func setUpWithError() throws { mockSessionProtocol = MockAespaCoreSessionRepresentable() } @@ -68,62 +68,6 @@ final class SessionTunerTests: XCTestCase { verify(mockSessionProtocol).removeAudioInput() } - func testSessionLaunchTuner_whenNotRunning() throws { - stub(mockSessionProtocol) { proxy in - when(proxy.isRunning.get).thenReturn(false) - - when(proxy.addMovieInput()).thenDoNothing() - when(proxy.addMovieFileOutput()).thenDoNothing() - when(proxy.addCapturePhotoOutput()).thenDoNothing() - - when(proxy.startRunning()).thenDoNothing() - } - - let tuner = SessionLaunchTuner() - try tuner.tune(mockSessionProtocol) - - verify(mockSessionProtocol) - .addMovieInput() - .with(returnType: Void.self) - - verify(mockSessionProtocol) - .addMovieFileOutput() - .with(returnType: Void.self) - - verify(mockSessionProtocol) - .addCapturePhotoOutput() - .with(returnType: Void.self) - - verify(mockSessionProtocol) - .startRunning() - .with(returnType: Void.self) - } - - func testSessionLaunchTuner_whenRunning() throws { - stub(mockSessionProtocol) { proxy in - when(proxy.isRunning.get).thenReturn(true) - } - - let tuner = SessionLaunchTuner() - try tuner.tune(mockSessionProtocol) - - verify(mockSessionProtocol, never()) - .addMovieInput() - .with(returnType: Void.self) - - verify(mockSessionProtocol, never()) - .addMovieFileOutput() - .with(returnType: Void.self) - - verify(mockSessionProtocol, never()) - .addCapturePhotoOutput() - .with(returnType: Void.self) - - verify(mockSessionProtocol, never()) - .startRunning() - .with(returnType: Void.self) - } - func testSessionTerminationTuner_whenRunning() throws { stub(mockSessionProtocol) { proxy in when(proxy.isRunning.get).thenReturn(true)