diff --git a/Demo/Aespa-iOS.xcodeproj/project.pbxproj b/Demo/Aespa-iOS.xcodeproj/project.pbxproj new file mode 100644 index 0000000..bed7218 --- /dev/null +++ b/Demo/Aespa-iOS.xcodeproj/project.pbxproj @@ -0,0 +1,406 @@ +// !$*UTF8*$! +{ + archiveVersion = 1; + classes = { + }; + objectVersion = 56; + objects = { + +/* Begin PBXBuildFile section */ + 0716FED02A37326600B5AA1B /* GalleryView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0716FECF2A37326600B5AA1B /* GalleryView.swift */; }; + 0716FED42A3737D700B5AA1B /* Aespa in Frameworks */ = {isa = PBXBuildFile; productRef = 0716FED32A3737D700B5AA1B /* Aespa */; }; + 07778FF12A31E3A000B1DC6C /* SettingView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 07778FF02A31E3A000B1DC6C /* SettingView.swift */; }; + 9CE5B7DC2A306F350058334D /* Aespa_iOSApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9CE5B7DB2A306F350058334D /* Aespa_iOSApp.swift */; }; + 9CE5B7DE2A306F350058334D /* VideoContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9CE5B7DD2A306F350058334D /* VideoContentView.swift */; }; + 9CE5B7E02A306F370058334D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 9CE5B7DF2A306F370058334D /* Assets.xcassets */; }; + 9CE5B7E32A306F370058334D /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 9CE5B7E22A306F370058334D /* Preview Assets.xcassets */; }; + 9CE5B80B2A3070380058334D /* VideoContentViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9CE5B80A2A3070380058334D /* VideoContentViewModel.swift */; }; +/* End PBXBuildFile section */ + +/* Begin PBXFileReference section */ + 0716FECF2A37326600B5AA1B /* GalleryView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GalleryView.swift; sourceTree = ""; }; + 07778FF02A31E3A000B1DC6C /* SettingView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SettingView.swift; sourceTree = ""; }; + 
07F81CEE2A49A21B00DEDB04 /* Aespa */ = {isa = PBXFileReference; lastKnownFileType = wrapper; name = Aespa; path = ..; sourceTree = ""; }; + 9CE5B7D82A306F350058334D /* Aespa-iOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Aespa-iOS.app"; sourceTree = BUILT_PRODUCTS_DIR; }; + 9CE5B7DB2A306F350058334D /* Aespa_iOSApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Aespa_iOSApp.swift; sourceTree = ""; }; + 9CE5B7DD2A306F350058334D /* VideoContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoContentView.swift; sourceTree = ""; }; + 9CE5B7DF2A306F370058334D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 9CE5B7E22A306F370058334D /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; + 9CE5B80A2A3070380058334D /* VideoContentViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoContentViewModel.swift; sourceTree = ""; }; + 9CE5B80E2A316BC00058334D /* Aespa-iOS.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = "Aespa-iOS.entitlements"; sourceTree = ""; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + 9CE5B7D52A306F350058334D /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + 0716FED42A3737D700B5AA1B /* Aespa in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 07F81CED2A49A21B00DEDB04 /* Packages */ = { + isa = PBXGroup; + children = ( + 07F81CEE2A49A21B00DEDB04 /* Aespa */, + ); + name = Packages; + sourceTree = ""; + }; + 9CE5B7CF2A306F350058334D = { + isa = PBXGroup; + children = ( + 
07F81CED2A49A21B00DEDB04 /* Packages */, + 9CE5B7DA2A306F350058334D /* Aespa-iOS */, + 9CE5B7D92A306F350058334D /* Products */, + 9CE5B8072A3070200058334D /* Frameworks */, + ); + sourceTree = ""; + }; + 9CE5B7D92A306F350058334D /* Products */ = { + isa = PBXGroup; + children = ( + 9CE5B7D82A306F350058334D /* Aespa-iOS.app */, + ); + name = Products; + sourceTree = ""; + }; + 9CE5B7DA2A306F350058334D /* Aespa-iOS */ = { + isa = PBXGroup; + children = ( + 9CE5B80E2A316BC00058334D /* Aespa-iOS.entitlements */, + 9CE5B7DB2A306F350058334D /* Aespa_iOSApp.swift */, + 9CE5B7DD2A306F350058334D /* VideoContentView.swift */, + 0716FECF2A37326600B5AA1B /* GalleryView.swift */, + 07778FF02A31E3A000B1DC6C /* SettingView.swift */, + 9CE5B80A2A3070380058334D /* VideoContentViewModel.swift */, + 9CE5B7DF2A306F370058334D /* Assets.xcassets */, + 9CE5B7E12A306F370058334D /* Preview Content */, + ); + path = "Aespa-iOS"; + sourceTree = ""; + }; + 9CE5B7E12A306F370058334D /* Preview Content */ = { + isa = PBXGroup; + children = ( + 9CE5B7E22A306F370058334D /* Preview Assets.xcassets */, + ); + path = "Preview Content"; + sourceTree = ""; + }; + 9CE5B8072A3070200058334D /* Frameworks */ = { + isa = PBXGroup; + children = ( + ); + name = Frameworks; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 9CE5B7D72A306F350058334D /* Aespa-iOS */ = { + isa = PBXNativeTarget; + buildConfigurationList = 9CE5B7FC2A306F370058334D /* Build configuration list for PBXNativeTarget "Aespa-iOS" */; + buildPhases = ( + 9CE5B7D42A306F350058334D /* Sources */, + 9CE5B7D52A306F350058334D /* Frameworks */, + 9CE5B7D62A306F350058334D /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = "Aespa-iOS"; + packageProductDependencies = ( + 0716FED32A3737D700B5AA1B /* Aespa */, + ); + productName = "Aespa-iOS"; + productReference = 9CE5B7D82A306F350058334D /* Aespa-iOS.app */; + productType = "com.apple.product-type.application"; + }; +/* End 
PBXNativeTarget section */ + +/* Begin PBXProject section */ + 9CE5B7D02A306F350058334D /* Project object */ = { + isa = PBXProject; + attributes = { + BuildIndependentTargetsInParallel = 1; + LastSwiftUpdateCheck = 1430; + LastUpgradeCheck = 1430; + TargetAttributes = { + 9CE5B7D72A306F350058334D = { + CreatedOnToolsVersion = 14.3; + }; + }; + }; + buildConfigurationList = 9CE5B7D32A306F350058334D /* Build configuration list for PBXProject "Aespa-iOS" */; + compatibilityVersion = "Xcode 14.0"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 9CE5B7CF2A306F350058334D; + packageReferences = ( + ); + productRefGroup = 9CE5B7D92A306F350058334D /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + 9CE5B7D72A306F350058334D /* Aespa-iOS */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + 9CE5B7D62A306F350058334D /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 9CE5B7E32A306F370058334D /* Preview Assets.xcassets in Resources */, + 9CE5B7E02A306F370058334D /* Assets.xcassets in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + 9CE5B7D42A306F350058334D /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 9CE5B7DE2A306F350058334D /* VideoContentView.swift in Sources */, + 07778FF12A31E3A000B1DC6C /* SettingView.swift in Sources */, + 9CE5B7DC2A306F350058334D /* Aespa_iOSApp.swift in Sources */, + 9CE5B80B2A3070380058334D /* VideoContentViewModel.swift in Sources */, + 0716FED02A37326600B5AA1B /* GalleryView.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin XCBuildConfiguration section */ + 9CE5B7FA2A306F370058334D /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + 
ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 14.0; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + 
9CE5B7FB2A306F370058334D /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 14.0; + MTL_ENABLE_DEBUG_INFO = NO; + MTL_FAST_MATH = YES; + SDKROOT = iphoneos; + SWIFT_COMPILATION_MODE = wholemodule; + SWIFT_OPTIMIZATION_LEVEL = "-O"; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 9CE5B7FD2A306F370058334D /* Debug */ = { + 
isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_ENTITLEMENTS = "Aespa-iOS/Aespa-iOS.entitlements"; + CODE_SIGN_IDENTITY = "Apple Development"; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_ASSET_PATHS = "\"Aespa-iOS/Preview Content\""; + DEVELOPMENT_TEAM = W6QHM4Y43Z; + ENABLE_PREVIEWS = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_NSAppleMusicUsageDescription = ""; + INFOPLIST_KEY_NSCameraUsageDescription = "Use camera"; + INFOPLIST_KEY_NSMicrophoneUsageDescription = "Use mic"; + INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = "Use and add album"; + INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "Use album"; + INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; + INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchScreen_Generation = YES; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "co.enebin.Aespa-iOS"; + PRODUCT_NAME = "$(TARGET_NAME)"; + PROVISIONING_PROFILE_SPECIFIER = ""; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + 9CE5B7FE2A306F370058334D /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_ENTITLEMENTS = "Aespa-iOS/Aespa-iOS.entitlements"; + CODE_SIGN_IDENTITY = "Apple Development"; + 
CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_ASSET_PATHS = "\"Aespa-iOS/Preview Content\""; + DEVELOPMENT_TEAM = W6QHM4Y43Z; + ENABLE_PREVIEWS = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_NSAppleMusicUsageDescription = ""; + INFOPLIST_KEY_NSCameraUsageDescription = "Use camera"; + INFOPLIST_KEY_NSMicrophoneUsageDescription = "Use mic"; + INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = "Use and add album"; + INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "Use album"; + INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; + INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchScreen_Generation = YES; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "co.enebin.Aespa-iOS"; + PRODUCT_NAME = "$(TARGET_NAME)"; + PROVISIONING_PROFILE_SPECIFIER = ""; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + 9CE5B7D32A306F350058334D /* Build configuration list for PBXProject "Aespa-iOS" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 9CE5B7FA2A306F370058334D /* Debug */, + 9CE5B7FB2A306F370058334D /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 9CE5B7FC2A306F370058334D /* Build configuration list for PBXNativeTarget "Aespa-iOS" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 9CE5B7FD2A306F370058334D /* Debug */, + 
9CE5B7FE2A306F370058334D /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + +/* Begin XCSwiftPackageProductDependency section */ + 0716FED32A3737D700B5AA1B /* Aespa */ = { + isa = XCSwiftPackageProductDependency; + productName = Aespa; + }; +/* End XCSwiftPackageProductDependency section */ + }; + rootObject = 9CE5B7D02A306F350058334D /* Project object */; +} diff --git a/Demo/Aespa-iOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/Demo/Aespa-iOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata new file mode 100644 index 0000000..919434a --- /dev/null +++ b/Demo/Aespa-iOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata @@ -0,0 +1,7 @@ + + + + + diff --git a/Demo/Aespa-iOS.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/Demo/Aespa-iOS.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist new file mode 100644 index 0000000..18d9810 --- /dev/null +++ b/Demo/Aespa-iOS.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist @@ -0,0 +1,8 @@ + + + + + IDEDidComputeMac32BitWarning + + + diff --git a/Demo/Aespa-iOS.xcodeproj/xcshareddata/xcschemes/Aespa-iOS.xcscheme b/Demo/Aespa-iOS.xcodeproj/xcshareddata/xcschemes/Aespa-iOS.xcscheme new file mode 100644 index 0000000..096fbb8 --- /dev/null +++ b/Demo/Aespa-iOS.xcodeproj/xcshareddata/xcschemes/Aespa-iOS.xcscheme @@ -0,0 +1,101 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Demo/Aespa-iOS/Aespa-iOS.entitlements b/Demo/Aespa-iOS/Aespa-iOS.entitlements new file mode 100644 index 0000000..0c67376 --- /dev/null +++ b/Demo/Aespa-iOS/Aespa-iOS.entitlements @@ -0,0 +1,5 @@ + + + + + diff --git a/Demo/Aespa-iOS/Aespa_iOSApp.swift b/Demo/Aespa-iOS/Aespa_iOSApp.swift new file mode 100644 index 0000000..1f8b392 --- /dev/null +++ b/Demo/Aespa-iOS/Aespa_iOSApp.swift @@ -0,0 +1,17 @@ +// +// Aespa_iOSApp.swift +// Aespa-iOS +// +// 
Created by 이영빈 on 2023/06/07. +// + +import SwiftUI + +@main +struct Aespa_iOSApp: App { + var body: some Scene { + WindowGroup { + VideoContentView() + } + } +} diff --git a/Demo/Aespa-iOS/Assets.xcassets/AccentColor.colorset/Contents.json b/Demo/Aespa-iOS/Assets.xcassets/AccentColor.colorset/Contents.json new file mode 100644 index 0000000..eb87897 --- /dev/null +++ b/Demo/Aespa-iOS/Assets.xcassets/AccentColor.colorset/Contents.json @@ -0,0 +1,11 @@ +{ + "colors" : [ + { + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Demo/Aespa-iOS/Assets.xcassets/AppIcon.appiconset/Contents.json b/Demo/Aespa-iOS/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 0000000..13613e3 --- /dev/null +++ b/Demo/Aespa-iOS/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,13 @@ +{ + "images" : [ + { + "idiom" : "universal", + "platform" : "ios", + "size" : "1024x1024" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Demo/Aespa-iOS/Assets.xcassets/Contents.json b/Demo/Aespa-iOS/Assets.xcassets/Contents.json new file mode 100644 index 0000000..73c0059 --- /dev/null +++ b/Demo/Aespa-iOS/Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Demo/Aespa-iOS/GalleryView.swift b/Demo/Aespa-iOS/GalleryView.swift new file mode 100644 index 0000000..fe4cfbb --- /dev/null +++ b/Demo/Aespa-iOS/GalleryView.swift @@ -0,0 +1,78 @@ +// +// GalleryView.swift +// Aespa-iOS +// +// Created by 이영빈 on 2023/06/12. 
+// + +import Aespa +import SwiftUI + +struct GalleryView: View { + @ObservedObject var viewModel: VideoContentViewModel + + @Binding private var mediaType: MediaType + + init( + mediaType: Binding, + contentViewModel viewModel: VideoContentViewModel + ) { + self._mediaType = mediaType + self.viewModel = viewModel + } + + var body: some View { + VStack(alignment: .center) { + Picker("File", selection: $mediaType) { + Text("Video").tag(MediaType.video) + Text("Photo").tag(MediaType.photo) + } + .pickerStyle(.segmented) + .frame(width: 200) + .padding(.vertical) + + ScrollView { + switch mediaType { + case .photo: + LazyVGrid( + columns: [GridItem(.flexible()), GridItem(.flexible()), GridItem(.flexible())], + spacing: 5 + ) { + ForEach(viewModel.photoFiles) { file in + let image = Image(uiImage: file.thumbnail ?? UIImage()) + + image + .resizable() + .scaledToFill() + } + } + .onAppear { + viewModel.fetchPhotoFiles() + } + case .video: + LazyVGrid( + columns: [GridItem(.flexible()), GridItem(.flexible()), GridItem(.flexible())], + spacing: 5 + ) { + ForEach(viewModel.videoFiles) { file in + let image = Image(uiImage: file.thumbnail ?? 
UIImage()) + + image + .resizable() + .scaledToFill() + } + } + .onAppear { + viewModel.fetchVideoFiles() + } + } + } + } + } +} + +struct GalleryView_Previews: PreviewProvider { + static var previews: some View { + GalleryView(mediaType: .constant(.video), contentViewModel: .init()) + } +} diff --git a/Demo/Aespa-iOS/Preview Content/Preview Assets.xcassets/Contents.json b/Demo/Aespa-iOS/Preview Content/Preview Assets.xcassets/Contents.json new file mode 100644 index 0000000..73c0059 --- /dev/null +++ b/Demo/Aespa-iOS/Preview Content/Preview Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Demo/Aespa-iOS/SettingView.swift b/Demo/Aespa-iOS/SettingView.swift new file mode 100644 index 0000000..5e60a15 --- /dev/null +++ b/Demo/Aespa-iOS/SettingView.swift @@ -0,0 +1,97 @@ +// +// SettingView.swift +// Aespa-iOS +// +// Created by Young Bin on 2023/06/08. +// + +import SwiftUI +import AVFoundation + +struct SettingView: View { + @ObservedObject var viewModel: VideoContentViewModel + + @State private var quality: AVCaptureSession.Preset + @State private var focusMode: AVCaptureDevice.FocusMode + + @State private var isMuted: Bool + + @State private var flashMode: AVCaptureDevice.FlashMode + + init(contentViewModel viewModel: VideoContentViewModel) { + self.viewModel = viewModel + + self.quality = viewModel.aespaSession.avCaptureSession.sessionPreset + self.focusMode = viewModel.aespaSession.currentFocusMode ?? 
.continuousAutoFocus + + self.isMuted = viewModel.aespaSession.isMuted + + self.flashMode = viewModel.aespaSession.currentSetting.flashMode + } + + var body: some View { + List { + Section(header: Text("Common")) { + Picker("Quality", selection: $quality) { + Text("Low").tag(AVCaptureSession.Preset.low) + Text("Medium").tag(AVCaptureSession.Preset.medium) + Text("High").tag(AVCaptureSession.Preset.high) + } + .modifier(TitledPicker(title: "Asset quality")) + .onChange(of: quality) { newValue in + viewModel.aespaSession.quality(to: newValue) + } + + Picker("Focus", selection: $focusMode) { + Text("Auto").tag(AVCaptureDevice.FocusMode.autoFocus) + Text("Locked").tag(AVCaptureDevice.FocusMode.locked) + Text("Continuous").tag(AVCaptureDevice.FocusMode.continuousAutoFocus) + } + .modifier(TitledPicker(title: "Focus mode")) + .onChange(of: focusMode) { newValue in + viewModel.aespaSession.focus(mode: newValue) + } + } + + Section(header: Text("Video")) { + Picker("Mute", selection: $isMuted) { + Text("Unmute").tag(false) + Text("Mute").tag(true) + } + .modifier(TitledPicker(title: "Mute")) + .onChange(of: isMuted) { newValue in + _ = newValue ? 
+ viewModel.aespaSession.mute() : + viewModel.aespaSession.unmute() + } + } + + Section(header: Text("Photo")) { + Picker("Flash", selection: $flashMode) { + Text("On").tag(AVCaptureDevice.FlashMode.on) + Text("Off").tag(AVCaptureDevice.FlashMode.off) + Text("Auto").tag(AVCaptureDevice.FlashMode.auto) + } + .modifier(TitledPicker(title: "Flash mode")) + .onChange(of: flashMode) { newValue in + viewModel.aespaSession.flashMode(to: newValue) + } + } + } + } + + struct TitledPicker: ViewModifier { + let title: String + func body(content: Content) -> some View { + VStack(alignment: .leading) { + Text(title) + .foregroundColor(.gray) + .font(.caption) + + content + .pickerStyle(.segmented) + .frame(height: 40) + } + } + } +} diff --git a/Demo/Aespa-iOS/VideoContentView.swift b/Demo/Aespa-iOS/VideoContentView.swift new file mode 100644 index 0000000..f370d12 --- /dev/null +++ b/Demo/Aespa-iOS/VideoContentView.swift @@ -0,0 +1,162 @@ +// +// VideoContentView.swift +// Aespa-iOS +// +// Created by 이영빈 on 2023/06/07. 
+// + +import Aespa +import SwiftUI + +struct VideoContentView: View { + @State var isRecording = false + @State var isFront = false + + @State var showSetting = false + @State var showGallery = false + + @State var captureMode: MediaType = .video + + @ObservedObject private var viewModel = VideoContentViewModel() + + var body: some View { + ZStack { + viewModel.preview + .frame(minWidth: 0, + maxWidth: .infinity, + minHeight: 0, + maxHeight: .infinity) + .edgesIgnoringSafeArea(.all) + + VStack { + ZStack(alignment: .center) { + // Mode change + Picker("Capture Modes", selection: $captureMode) { + Text("Video").tag(MediaType.video) + Text("Photo").tag(MediaType.photo) + } + .pickerStyle(.segmented) + .background(Color.black.opacity(0.7)) + .cornerRadius(8) + .frame(width: 200) + + HStack { + Spacer() + + Button(action: { showSetting = true }) { + Image(systemName: "gear") + .resizable() + .foregroundColor(.white) + .scaledToFit() + .frame(width: 30, height: 30) + + } + .padding(20) + .contentShape(Rectangle()) + } + } + + Spacer() + + ZStack { + HStack { + // Album thumbnail + button + Button(action: { showGallery = true }) { + let coverImage = ( + captureMode == .video + ? viewModel.videoAlbumCover + : viewModel.photoAlbumCover) + ?? Image("") + + roundRectangleShape(with: coverImage, size: 80) + } + .shadow(radius: 5) + .contentShape(Rectangle()) + + Spacer() + + // Position change + button + Button(action: { + viewModel.aespaSession.position(to: isFront ? 
.back : .front) + isFront.toggle() + }) { + Image(systemName: "arrow.triangle.2.circlepath.camera.fill") + .resizable() + .foregroundColor(.white) + .scaledToFit() + .frame(width: 50, height: 50) + .padding(20) + .padding(.trailing, 20) + } + .shadow(radius: 5) + .contentShape(Rectangle()) + } + + // Shutter + button + recordingButtonShape(width: 60).onTapGesture { + switch captureMode { + case .video: + if isRecording { + viewModel.aespaSession.stopRecording() + isRecording = false + } else { + viewModel.aespaSession.startRecording() + isRecording = true + } + case .photo: + viewModel.aespaSession.capturePhoto() + } + } + } + } + } + .sheet(isPresented: $showSetting) { + SettingView(contentViewModel: viewModel) + } + .sheet(isPresented: $showGallery) { + GalleryView(mediaType: $captureMode, contentViewModel: viewModel) + } + } +} + +extension VideoContentView { + @ViewBuilder + func roundRectangleShape(with image: Image, size: CGFloat) -> some View { + image + .resizable() + .scaledToFill() + .frame(width: size, height: size, alignment: .center) + .clipped() + .cornerRadius(10) + .overlay( + RoundedRectangle(cornerRadius: 10) + .stroke(.white, lineWidth: 1) + ) + .padding(20) + } + + @ViewBuilder + func recordingButtonShape(width: CGFloat) -> some View { + ZStack { + Circle() + .strokeBorder(isRecording ? .red : .white, lineWidth: 3) + .frame(width: width) + + Circle() + .fill(isRecording ? .red : .white) + .frame(width: width * 0.8) + } + .frame(height: width) + } +} + +enum MediaType { + case video + case photo +} + +struct VideoContentView_Previews: PreviewProvider { + static var previews: some View { + VideoContentView() + } +} diff --git a/Demo/Aespa-iOS/VideoContentViewModel.swift b/Demo/Aespa-iOS/VideoContentViewModel.swift new file mode 100644 index 0000000..7862117 --- /dev/null +++ b/Demo/Aespa-iOS/VideoContentViewModel.swift @@ -0,0 +1,112 @@ +// +// VideoContentViewModel.swift +// Aespa-iOS +// +// Created by 이영빈 on 2023/06/07. 
+// + +import Combine +import SwiftUI +import Foundation + +import Aespa + +class VideoContentViewModel: ObservableObject { + let aespaSession: AespaSession + + var preview: some View { + return aespaSession.interactivePreview() + + // Or you can give some options +// let option = InteractivePreviewOption(enableShowingCrosshair: false) +// return aespaSession.interactivePreview(option: option) + } + + private var subscription = Set() + + @Published var videoAlbumCover: Image? + @Published var photoAlbumCover: Image? + + @Published var videoFiles: [VideoFile] = [] + @Published var photoFiles: [PhotoFile] = [] + + init() { + let option = AespaOption(albumName: "Aespa-Demo") + self.aespaSession = Aespa.session(with: option) + + // Common setting + aespaSession + .focus(mode: .continuousAutoFocus) + .changeMonitoring(enabled: true) + .orientation(to: .portrait) + .quality(to: .high) + .custom(WideColorCameraTuner()) { result in + if case .failure(let error) = result { + print("Error: ", error) + } + } + + // Photo-only setting + aespaSession + .flashMode(to: .on) + .redEyeReduction(enabled: true) + + // Video-only setting + aespaSession + .mute() + .stabilization(mode: .auto) + + // Prepare video album cover + aespaSession.videoFilePublisher + .receive(on: DispatchQueue.main) + .map { result -> Image? in + if case .success(let file) = result { + return file.thumbnailImage + } else { + return nil + } + } + .assign(to: \.videoAlbumCover, on: self) + .store(in: &subscription) + + // Prepare photo album cover + aespaSession.photoFilePublisher + .receive(on: DispatchQueue.main) + .map { result -> Image? 
in + if case .success(let file) = result { + return file.thumbnailImage + } else { + return nil + } + } + .assign(to: \.photoAlbumCover, on: self) + .store(in: &subscription) + } + + func fetchVideoFiles() { + // File fetching task can cause low responsiveness when called from main thread + DispatchQueue.global().async { + let fetchedFiles = self.aespaSession.fetchVideoFiles() + + DispatchQueue.main.async { + self.videoFiles = fetchedFiles + } + } + } + + func fetchPhotoFiles() { + // File fetching task can cause low responsiveness when called from main thread + let fetchedFiles = self.aespaSession.fetchPhotoFiles() + self.photoFiles = fetchedFiles + } +} + + +extension VideoContentViewModel { + // Example for using custom session tuner + struct WideColorCameraTuner: AespaSessionTuning { + func tune(_ session: T) throws where T : AespaCoreSessionRepresentable { + session.avCaptureSession.automaticallyConfiguresCaptureDeviceForWideColor = true + } + } +} diff --git a/Demo/Package.swift b/Demo/Package.swift new file mode 100644 index 0000000..da8e907 --- /dev/null +++ b/Demo/Package.swift @@ -0,0 +1,3 @@ +import PackageDescription + +let package = Package() diff --git a/README.md b/README.md index c1d3c0e..3dd5fe3 100644 --- a/README.md +++ b/README.md @@ -5,14 +5,13 @@
-### Add a camera in just 3 lines +### From camera to album. In just 2 lines.
``` Swift let aespaOption = AespaOption(albumName: "YOUR_ALBUM_NAME") let aespaSession = Aespa.session(with: aespaOption) -try await Aespa.configure() // Done! ``` @@ -48,16 +47,12 @@ try await Aespa.configure() ## Introduction Aespa is a robust and intuitive Swift package for video capturing, built with a focus on the ease of setting up and usage. -**This package provides a high-level API over Apple's `AVFoundation` framework**: abstracting away its complexity and making it straightforward for developers to implement video capturing functionalities in their iOS applications. +It is designed to be easy to use from beginners to intermediate developers. If you're new to video recording on iOS or if you're looking to simplify your existing camera setup, Aespa could be the perfect fit for your project. -**This package provides a clean, user-friendly API for common video recording tasks**: including starting and stopping recording, managing audio settings, adjusting video quality, setting camera position, etc. - - -## Features -Aespa is designed to be easy to use for both beginners and experienced developers. If you're new to video recording on iOS or if you're looking to simplify your existing recording setup, Aespa could be the perfect fit for your project. +### ✅ Super easy to use
- ✅ Super easy to use + Zip the boring configuration routine *Before* ``` mermaid @@ -69,6 +64,7 @@ AS -- "Connect" --> AIA["AVCaptureAudioInput"] AS -- "Add" --> FO["AVCaptureFileOutput"] FO --> PHCollectionListChangeRequest ``` + **Aespa** ``` mermaid graph LR @@ -80,7 +76,7 @@ graph LR
- ✅ Offer essential preset configuration & customization + Offer essential preset configuration & customization ``` mermaid graph TD @@ -96,7 +92,16 @@ AS --> D["Fetching asset files"]
- ✅ Combine & async support + Comprehensive error handling + +- The package provides comprehensive error handling, allowing you to build robust applications with minimal effort. + +
+ +### ✅ No more delegate +
+ + Combine support ``` mermaid graph LR; @@ -110,12 +115,12 @@ graph LR;
-
- ✅ Comprehensive error handling - -- The package provides comprehensive error handling, allowing you to build robust applications with minimal effort. +### ✅ Also +- Automated system permission management. +- Seamless image and video capture within a single preview session. +- Thread-safe. +- Supports SPM. -
## Functionality @@ -123,13 +128,24 @@ graph LR; > > You can access our **official documentation** for the most comprehensive and up-to-date explanations in [here](https://enebin.github.io/Aespa/documentation/aespa/) +### `InteractivePreview` +One of our main feature, `InteractivePreview` provides a comprehensive and intuitive way for users to interact directly with the camera preview. + +| Features | Description | +|------------------------|------------------------------------------------------------------------------------------------------------------| +| Tap to focus | Adjusts the focus of the camera based on the tapped area on the screen. | +| Double tap to change camera | Switches between the front and back camera upon double tapping. | +| Pinch zoom | Allows zooming in or out on the preview by using a pinch gesture. | + + +### More manual options | Common | Description | |----------------------------------|------------------------------------------------------------------------------------------------------------------| -| ✨ `zoom` | Modifies the zoom factor. | -| ✨ `setPosition` | Changes the camera position. | -| `setOrientation` | Modifies the orientation. | -| `setAutofocusing` | Alters the autofocusing mode. | -| `setQuality` | Adjusts the video quality preset for the recording session. | +| ✨ `zoom` | Modifies the zoom factor. | +| ✨ `position` | Changes the camera position. | +| `orientation` | Modifies the orientation. | +| `focus` | Alters the autofocusing mode. | +| `quality` | Adjusts the video quality preset for the recording session. | | `doctor` | Checks if essential conditions to start recording are satisfied. | | `previewLayerPublisher` | Responsible for emitting updates to the preview layer. | @@ -139,8 +155,8 @@ graph LR; | ✨ `stopRecording` | Terminates the current video recording session and attempts to save the video file. | | `mute` | Mutes the audio input. | | `unmute` | Restores the audio input. 
| -| `setStabilization` | Alters the stabilization mode. | -| `setTorch` | Adjusts the torch mode and level. | +| `stabilization` | Alters the stabilization mode. | +| `torch` | Adjusts the torch mode and level. | | `customize` | Customizes the session with a specific tuning configuration. | | ✨ `fetchVideoFiles` | Fetches a list of recorded video files. | | `videoFilePublisher` | Emits a `Result` object containing a latest video file data. | @@ -148,12 +164,13 @@ graph LR; | Photo | Description | |----------------------------------|------------------------------------------------------------------------------------------------------------------| | ✨ `capturePhoto` | Capture a photo and returns a result image file. | -| ✨ `setFlashMode` | Sets the flash mode for the photo capture session. | +| ✨ `flashMode` | Sets the flash mode for the photo capture session. | | `redEyeReduction` | Enables or disables red-eye reduction for the photo capture session. | | `customize` | Customizes the photo capture session with a specific `AVCapturePhotoSettings`. | | ✨ `fetchPhotoFiles` | Fetches a list of captured photos files. | | `photoFilePublisher` | Emits a `Result` object containing a latest image file data. | + ## Installation ### Swift Package Manager (SPM) Follow these steps to install **Aespa** using SPM: @@ -166,7 +183,7 @@ https://github.com/enebin/Aespa.git 3. For the `Version rule`, select `Up to Next Minor` and specify the current Aespa version then click `Next`. 4. On the final screen, select the `Aespa` library and then click `Finish`. -**Aespa** should now be integrated into your project 🚀 +**Aespa** should now be integrated into your project 🚀. ## Usage @@ -185,34 +202,27 @@ import Aespa let aespaOption = AespaOption(albumName: "YOUR_ALBUM_NAME") let aespaSession = Aespa.session(with: aespaOption) - -Task(priority: .background) { - try await Aespa.configure() -} ``` -> **Warning** -> -> Please ensure to call `configure` within a background execution context. 
Neglecting to do so may lead to significantly reduced responsiveness in your application. ([reference](https://developer.apple.com/documentation/avfoundation/avcapturesession/1388185-startrunning)) ## Implementation Exapmles ### Configuration ``` Swift // Common setting aespaSession - .setAutofocusing(mode: .continuousAutoFocus) - .setOrientation(to: .portrait) - .setQuality(to: .high) + .autofocusing(mode: .continuousAutoFocus) + .orientation(to: .portrait) + .quality(to: .high) .customize(WideColorCameraTuner()) // Photo-only setting aespaSession - .setFlashMode(to: .on) + .flashMode(to: .on) .redEyeReduction(enabled: true) // Video-only setting aespaSession .mute() - .setStabilization(mode: .auto) + .stabilization(mode: .auto) ``` ### Recording & Capture @@ -225,10 +235,26 @@ aespaSession.stopRecording() // Capture photo aespaSession.capturePhoto() ``` +### Get result +``` Swift +aespaSession.stopRecording { result in + switch result { + case .success(let file): + // + case .failure(let error): + print(error) + } +} -## SwiftUI Integration +// or +aespaSession.fetchVideoFiles(limit: 1) + +// or you can use publisher +aespaSession.videoFilePublisher.sink { result in ... } +``` -Aespa also provides a super-easy way to integrate video capture functionality into SwiftUI applications. AespaSession includes a helper method to create a SwiftUI `UIViewRepresentable` that provides a preview of the video capture. +## SwiftUI Integration +Aespa also provides a super-easy way to integrate video capture functionality into SwiftUI applications. `AespaSession` includes a helper method to create a SwiftUI `UIViewRepresentable` that provides a preview of the video capture. 
### Example usage @@ -253,28 +279,20 @@ struct VideoContentView: View { class VideoContentViewModel: ObservableObject { let aespaSession: AespaSession - var preview: some UIViewRepresentable { - aespaSession.preview() + var preview: some View { + aespaSession.interactivePreview() } init() { let option = AespaOption(albumName: "Aespa-Demo") self.aespaSession = Aespa.session(with: option) - - Task(priority: .background) { - do { - try await Aespa.configure() - aespaSession - .setAutofocusing(mode: .continuousAutoFocus) - .setOrientation(to: .portrait) - .setQuality(to: .high) - - // Other settings ... - - } catch let error { - print(error) - } - } + + aespaSession + .autofocusing(mode: .continuousAutoFocus) + .orientation(to: .portrait) + .quality(to: .high) + + // Other settings... } } ``` diff --git a/Sources/Aespa/Aespa.swift b/Sources/Aespa/Aespa.swift index eef26e6..bf3d4ba 100644 --- a/Sources/Aespa/Aespa.swift +++ b/Sources/Aespa/Aespa.swift @@ -15,53 +15,41 @@ open class Aespa { /// - Parameters: /// - option: The `AespaOption` to configure the session. /// - Returns: The newly created `AespaSession`. - public static func session(with option: AespaOption) -> AespaSession { + public static func session( + with option: AespaOption, + onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaSession { + if let core { return core } + let newCore = AespaSession(option: option) - core = newCore - // Check logging option Logger.enableLogging = option.log.loggingEnabled - - return newCore - } - - /// Configures the `AespaSession` for recording. - /// Call this method to start the flow of data from the capture session’s inputs to its outputs. - /// - /// This method ensures that necessary permissions are granted - /// and the session is properly configured before starting. - /// If either the session isn't configured or the necessary permissions aren't granted, - /// it throws an error. 
- /// - /// - Warning: This method is synchronous and blocks until the session starts running or it fails, - /// which it reports by posting an `AVCaptureSessionRuntimeError` notification. - public static func configure() async throws { - guard let core = core else { - throw AespaError.session(reason: .notConfigured) - } - - guard - case .permitted = await AuthorizationChecker.checkCaptureAuthorizationStatus() - else { - throw AespaError.permission(reason: .denied) + + // Configure session now + Task { + guard + case .permitted = await AuthorizationChecker.checkCaptureAuthorizationStatus() + else { + throw AespaError.permission(reason: .denied) + } + + newCore.startSession(onComplete) } - - try core.startSession() - - Logger.log(message: "Session is configured successfully") + + core = newCore + return newCore } - + /// Terminates the current `AespaSession`. /// /// If a session has been started, it stops the session and releases resources. /// After termination, a new session needs to be configured to start recording again. - public static func terminate() throws { + public static func terminate(_ onComplete: @escaping CompletionHandler = { _ in }) throws { guard let core = core else { return } - try core.terminateSession() - Logger.log(message: "Session is terminated successfully") + core.terminateSession(onComplete) } } diff --git a/Sources/Aespa/AespaError.swift b/Sources/Aespa/AespaError.swift index cc3b0be..bc4670b 100644 --- a/Sources/Aespa/AespaError.swift +++ b/Sources/Aespa/AespaError.swift @@ -53,8 +53,10 @@ public extension AespaError { "Output is already exists" case unableToSetOutput = "Unable to set output." - case unsupported = - "Unsupported device (supported on iPhone XR and later devices)" + case notSupported = + "Unsupported functionality." + case busy = + "Device is busy now." 
} enum PermissionErrorReason: String { diff --git a/Sources/Aespa/AespaSession.swift b/Sources/Aespa/AespaSession.swift index 6d0689f..e568c42 100644 --- a/Sources/Aespa/AespaSession.swift +++ b/Sources/Aespa/AespaSession.swift @@ -20,8 +20,8 @@ import AVFoundation /// /// It also includes functionalities to fetch video files. open class AespaSession { - private let option: AespaOption - private let coreSession: AespaCoreSession + let option: AespaOption + let coreSession: AespaCoreSession private let fileManager: AespaCoreFileManager private let albumManager: AespaCoreAlbumManager @@ -102,6 +102,11 @@ open class AespaSession { public var avCaptureSession: AVCaptureSession { coreSession } + + /// This property indicates whether the current session is active or not. + public var isRunning: Bool { + coreSession.isRunning + } /// This property provides the maximum zoom factor supported by the active video device format. public var maxZoomFactor: CGFloat? { @@ -127,6 +132,21 @@ open class AespaSession { return connection.videoOrientation } + /// This property reflects the device's current position. + public var currentCameraPosition: AVCaptureDevice.Position? { + guard let device = coreSession.videoDeviceInput?.device else { return nil } + return device.position + } + + /// This property indicates whether the camera device is set to monitor changes in the subject area. + /// + /// Enabling subject area change monitoring allows the device to adjust focus and exposure settings automatically + /// when the subject within the specified area changes. + public var isSubjectAreaChangeMonitoringEnabled: Bool? { + guard let device = coreSession.videoDeviceInput?.device else { return nil } + return device.isSubjectAreaChangeMonitoringEnabled + } + /// This publisher is responsible for emitting updates to the preview layer. /// /// A log message is printed to the console every time a new layer is pushed. 
@@ -138,8 +158,20 @@ open class AespaSession { .compactMap { $0 } .eraseToAnyPublisher() } - + // MARK: - Utilities + /// Returns a publisher that emits a `Notification` when the subject area of the capture device changes. + /// + /// This is useful when you want to react to changes in the capture device's subject area, + /// such as when the user changes the zoom factor, or when the device changes its autofocus area. + /// + /// - Returns: An `AnyPublisher` instance that emits `Notification` values. + public func getSubjectAreaDidChangePublisher() -> AnyPublisher { + return NotificationCenter.default + .publisher(for: NSNotification.Name.AVCaptureDeviceSubjectAreaDidChange) + .eraseToAnyPublisher() + } + /// Checks if essential conditions to start recording are satisfied. /// This includes checking for capture authorization, if the session is running, /// if there is an existing connection and if a device is attached. @@ -177,96 +209,125 @@ extension AespaSession: CommonContext { } @discardableResult - public func setQualityWithError(to preset: AVCaptureSession.Preset) throws -> AespaSession { + public func quality( + to preset: AVCaptureSession.Preset, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaSession { let tuner = QualityTuner(videoQuality: preset) - try coreSession.run(tuner) + coreSession.run(tuner, onComplete) return self } @discardableResult - public func setPositionWithError(to position: AVCaptureDevice.Position) throws -> AespaSession { + public func position( + to position: AVCaptureDevice.Position, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaSession { let tuner = CameraPositionTuner(position: position, devicePreference: option.session.cameraDevicePreference) - try coreSession.run(tuner) + coreSession.run(tuner, onComplete) return self } @discardableResult - public func setOrientationWithError(to orientation: AVCaptureVideoOrientation) throws -> AespaSession { + public func orientation( + to orientation: 
AVCaptureVideoOrientation, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaSession { let tuner = VideoOrientationTuner(orientation: orientation) - try coreSession.run(tuner) + coreSession.run(tuner, onComplete) return self } @discardableResult - public func setAutofocusingWithError(mode: AVCaptureDevice.FocusMode) throws -> AespaSession { - let tuner = AutoFocusTuner(mode: mode) - try coreSession.run(tuner) + public func focus( + mode: AVCaptureDevice.FocusMode, point: CGPoint? = nil, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaSession { + let tuner = FocusTuner(mode: mode, point: point) + coreSession.run(tuner, onComplete) return self } @discardableResult - public func zoomWithError(factor: CGFloat) throws -> AespaSession { + public func zoom(factor: CGFloat, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaSession { let tuner = ZoomTuner(zoomFactor: factor) - try coreSession.run(tuner) + coreSession.run(tuner, onComplete) return self } - - public func customizeWithError(_ tuner: T) throws -> AespaSession { - try coreSession.run(tuner) + + @discardableResult + public func changeMonitoring(enabled: Bool, _ onComplete: @escaping CompletionHandler = { _ in }) -> AespaSession { + let tuner = ChangeMonitoringTuner(isSubjectAreaChangeMonitoringEnabled: enabled) + coreSession.run(tuner, onComplete) + return self + } + + @discardableResult + public func custom( + _ tuner: T, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaSession { + coreSession.run(tuner, onComplete) return self } } extension AespaSession: VideoContext { public typealias AespaVideoSessionContext = AespaVideoContext - + public var underlyingVideoContext: AespaVideoSessionContext { videoContext } - + public var videoFilePublisher: AnyPublisher, Never> { videoContext.videoFilePublisher } - + public var isRecording: Bool { videoContext.isRecording } - + public var isMuted: Bool { videoContext.isMuted } - - public func 
startRecordingWithError() throws { - try videoContext.startRecordingWithError() + + public func startRecording(_ onComplete: @escaping CompletionHandler = { _ in }) { + videoContext.startRecording(onComplete) } - @discardableResult - public func stopRecordingWithError() async throws -> VideoFile { - try await videoContext.stopRecordingWithError() + public func stopRecording(_ completionHandler: @escaping (Result) -> Void = { _ in }) { + videoContext.stopRecording(completionHandler) } - + @discardableResult - public func muteWithError() throws -> AespaVideoSessionContext { - try videoContext.muteWithError() + public func mute(_ onComplete: @escaping CompletionHandler = { _ in }) -> AespaVideoSessionContext { + videoContext.mute(onComplete) } - + @discardableResult - public func unmuteWithError() throws -> AespaVideoSessionContext { - try videoContext.unmuteWithError() + public func unmute(_ onComplete: @escaping CompletionHandler = { _ in }) -> AespaVideoSessionContext { + videoContext.unmute(onComplete) } - + @discardableResult - public func setStabilizationWithError(mode: AVCaptureVideoStabilizationMode) throws -> AespaVideoSessionContext { - try videoContext.setStabilizationWithError(mode: mode) + public func stabilization( + mode: AVCaptureVideoStabilizationMode, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaVideoSessionContext { + videoContext.stabilization(mode: mode, onComplete) } @discardableResult - public func setTorchWithError(mode: AVCaptureDevice.TorchMode, level: Float) throws -> AespaVideoSessionContext { - try videoContext.setTorchWithError(mode: mode, level: level) + public func torch( + mode: AVCaptureDevice.TorchMode, + level: Float, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaVideoSessionContext { + videoContext.torch(mode: mode, level: level, onComplete) } - public func fetchVideoFiles(limit: Int) -> [VideoFile] { + public func fetchVideoFiles(limit: Int = 0) -> [VideoFile] { 
videoContext.fetchVideoFiles(limit: limit) } } @@ -284,43 +345,43 @@ extension AespaSession: PhotoContext { photoContext.currentSetting } - public func capturePhotoWithError() async throws -> PhotoFile { - try await photoContext.capturePhotoWithError() - } + public func capturePhoto(_ completionHandler: @escaping (Result) -> Void = { _ in }) { + photoContext.capturePhoto(completionHandler) + } + @discardableResult - public func setFlashMode(to mode: AVCaptureDevice.FlashMode) -> AespaPhotoContext { - photoContext.setFlashMode(to: mode) + public func flashMode(to mode: AVCaptureDevice.FlashMode) -> AespaPhotoContext { + photoContext.flashMode(to: mode) } @discardableResult public func redEyeReduction(enabled: Bool) -> AespaPhotoContext { photoContext.redEyeReduction(enabled: enabled) } - - public func custom(_ setting: AVCapturePhotoSettings) { - photoSetting = setting - } - - public func fetchPhotoFiles(limit: Int) -> [PhotoFile] { - photoContext.fetchPhotoFiles(limit: limit) - } + @discardableResult public func custom(_ setting: AVCapturePhotoSettings) -> AespaPhotoContext { photoContext.custom(setting) } + + public func fetchPhotoFiles(limit: Int = 0) -> [PhotoFile] { + photoContext.fetchPhotoFiles(limit: limit) + } } extension AespaSession { - func startSession() throws { - let tuner = SessionLaunchTuner() - try coreSession.run(tuner) - - previewLayerSubject.send(previewLayer) + func startSession(_ onComplete: @escaping CompletionHandler) { + do { + try coreSession.start() + previewLayerSubject.send(previewLayer) + } catch let error { + onComplete(.failure(error)) + } } - - func terminateSession() throws { + + func terminateSession(_ onComplete: @escaping CompletionHandler) { let tuner = SessionTerminationTuner() - try coreSession.run(tuner) + coreSession.run(tuner, onComplete) } } diff --git a/Sources/Aespa/Context/Context.swift b/Sources/Aespa/Context/Context.swift deleted file mode 100644 index 399cf00..0000000 --- a/Sources/Aespa/Context/Context.swift +++ 
/dev/null @@ -1,547 +0,0 @@ -// -// File.swift -// -// -// Created by 이영빈 on 2023/06/24. -// - -import UIKit -import Combine -import Foundation -import AVFoundation - -public typealias ErrorHandler = (Error) -> Void - -public protocol CommonContext { - associatedtype CommonContextType: CommonContext & VideoContext & PhotoContext - - var underlyingCommonContext: CommonContextType { get } - - /// Sets the quality preset for the video recording session. - /// - /// - Parameter preset: An `AVCaptureSession.Preset` value indicating the quality preset to be set. - /// - /// - Throws: `AespaError` if the session fails to run the tuner. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func setQualityWithError(to preset: AVCaptureSession.Preset) throws -> CommonContextType - - /// Sets the camera position for the video recording session. - /// - /// It refers to `AespaOption.Session.cameraDevicePreference` when choosing the camera device. - /// - /// - Parameter position: An `AVCaptureDevice.Position` value indicating the camera position to be set. - /// - /// - Throws: `AespaError` if the session fails to run the tuner. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func setPositionWithError(to position: AVCaptureDevice.Position) throws -> CommonContextType - - /// Sets the orientation for the session. - /// - /// - Parameter orientation: An `AVCaptureVideoOrientation` value indicating the orientation to be set. - /// - /// - Throws: `AespaError` if the session fails to run the tuner. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - /// - /// - Note: It sets the orientation of the video you are recording, - /// not the orientation of the `AVCaptureVideoPreviewLayer`. - @discardableResult func setOrientationWithError(to orientation: AVCaptureVideoOrientation) throws -> CommonContextType - - /// Sets the autofocusing mode for the video recording session. 
- /// - /// - Parameter mode: The focus mode(`AVCaptureDevice.FocusMode`) for the session. - /// - /// - Throws: `AespaError` if the session fails to run the tuner. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func setAutofocusingWithError(mode: AVCaptureDevice.FocusMode) throws -> CommonContextType - - /// Sets the zoom factor for the video recording session. - /// - /// - Parameter factor: A `CGFloat` value indicating the zoom factor to be set. - /// - /// - Throws: `AespaError` if the session fails to run the tuner. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func zoomWithError(factor: CGFloat) throws -> CommonContextType - - /// This function provides a way to use a custom tuner to modify the current session. - /// The tuner must conform to `AespaSessionTuning`. - /// - /// - Parameter tuner: An instance that conforms to `AespaSessionTuning`. - /// - Throws: If the session fails to run the tuner. - @discardableResult func customizeWithError(_ tuner: T) throws -> CommonContextType -} - -// MARK: Non-throwing methods -// These methods encapsulate error handling within the method itself rather than propagating it to the caller. -// This means any errors that occur during the execution of these methods will be caught and logged, not thrown. -// Although it simplifies error handling, this approach may not be recommended because it offers less control to callers. -// Developers are encouraged to use methods that throw errors, to gain finer control over error handling. -extension CommonContext { - /// Sets the quality preset for the video recording session. - /// - /// - Parameter preset: An `AVCaptureSession.Preset` value indicating the quality preset to be set. - /// - /// If an error occurs during the operation, the error is logged. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. 
- @discardableResult - public func setQuality( - to preset: AVCaptureSession.Preset, - errorHandler: ErrorHandler? = nil - ) -> CommonContextType { - do { - return try self.setQualityWithError(to: preset) - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingCommonContext - } - - /// Sets the camera position for the video recording session. - /// - /// - Parameter position: An `AVCaptureDevice.Position` value indicating the camera position to be set. - /// - /// If an error occurs during the operation, the error is logged. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult - public func setPosition( - to position: AVCaptureDevice.Position, - errorHandler: ErrorHandler? = nil - ) -> CommonContextType { - do { - return try self.setPositionWithError(to: position) - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingCommonContext - } - - /// Sets the orientation for the session. - /// - /// - Parameter orientation: An `AVCaptureVideoOrientation` value indicating the orientation to be set. - /// - /// If an error occurs during the operation, the error is logged. - /// - /// - Note: It sets the orientation of the video you are recording, - /// not the orientation of the `AVCaptureVideoPreviewLayer`. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult - public func setOrientation( - to orientation: AVCaptureVideoOrientation, - errorHandler: ErrorHandler? = nil - ) -> CommonContextType { - do { - return try self.setOrientationWithError(to: orientation) - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingCommonContext - } - - /// Sets the autofocusing mode for the video recording session. 
- /// - /// - Parameter mode: The focus mode for the capture device. - /// - /// If an error occurs during the operation, the error is logged. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult - public func setAutofocusing( - mode: AVCaptureDevice.FocusMode, - errorHandler: ErrorHandler? = nil - ) -> CommonContextType { - do { - return try self.setAutofocusingWithError(mode: mode) - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingCommonContext - } - - /// Sets the zoom factor for the video recording session. - /// - /// - Parameter factor: A `CGFloat` value indicating the zoom factor to be set. - /// - /// If an error occurs during the operation, the error is logged. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult - public func zoom( - factor: CGFloat, - errorHandler: ErrorHandler? = nil - ) -> CommonContextType { - do { - return try self.zoomWithError(factor: factor) - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingCommonContext - } - - @discardableResult - public func custom( - _ tuner: T, - errorHandler: ErrorHandler? = nil - ) -> CommonContextType { - do { - return try self.customizeWithError(tuner) - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingCommonContext - } -} - - -public protocol VideoContext { - associatedtype VideoContextType: VideoContext - - var underlyingVideoContext: VideoContextType { get } - - var isRecording: Bool { get } - - /// This publisher is responsible for emitting `VideoFile` objects resulting from completed recordings. - /// - /// In the case of an error, it logs the error before forwarding it wrapped in a `Result.failure`. 
- /// If you don't want to show logs, set `enableLogging` to `false` from `AespaOption.Log` - /// - /// - Returns: `VideoFile` wrapped in a `Result` type. - var videoFilePublisher: AnyPublisher, Never> { get } - - /// This property reflects the current state of audio input. - /// - /// If it returns `true`, the audio input is currently muted. - var isMuted: Bool { get } - - /// - Throws: `AespaError` if the video file path request fails, - /// orientation setting fails, or starting the recording fails. - /// - /// - Note: If `autoVideoOrientation` option is enabled, - /// it sets the orientation according to the current device orientation. - func startRecordingWithError() throws - - /// Stops the ongoing video recording session and attempts to add the video file to the album. - /// - /// Supporting `async`, you can use this method in Swift Concurrency's context - /// - /// - Throws: `AespaError` if stopping the recording fails. - @discardableResult func stopRecordingWithError() async throws -> VideoFile - - /// Mutes the audio input for the video recording session. - /// - /// - Throws: `AespaError` if the session fails to run the tuner. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func muteWithError() throws -> VideoContextType - - /// Unmutes the audio input for the video recording session. - /// - /// - Throws: `AespaError` if the session fails to run the tuner. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult func unmuteWithError() throws -> VideoContextType - - /// Sets the stabilization mode for the video recording session. - /// - /// - Parameter mode: An `AVCaptureVideoStabilizationMode` value - /// indicating the stabilization mode to be set. - /// - /// - Throws: `AespaError` if the session fails to run the tuner. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. 
- @discardableResult func setStabilizationWithError(mode: AVCaptureVideoStabilizationMode) throws -> VideoContextType - - /// Sets the torch mode and level for the video recording session. - /// - /// - Parameters: - /// - mode: The desired torch mode (AVCaptureDevice.TorchMode). - /// - level: The desired torch level as a Float between 0.0 and 1.0. - /// - /// - Returns: Returns self, allowing additional settings to be configured. - /// - /// - Throws: Throws an error if setting the torch mode or level fails. - /// - /// - Note: This function might throw an error if the torch mode is not supported, - /// or the specified level is not within the acceptable range. - @discardableResult func setTorchWithError(mode: AVCaptureDevice.TorchMode, level: Float) throws -> VideoContextType - - /// Fetches a list of recorded video files. - /// The number of files fetched is controlled by the limit parameter. - /// - /// It is recommended not to be called in main thread. - /// - /// - Parameter limit: An integer specifying the maximum number of video files to fetch. - /// - /// - Returns: An array of `VideoFile` instances. - func fetchVideoFiles(limit: Int) -> [VideoFile] -} - -// MARK: Non-throwing methods -// These methods encapsulate error handling within the method itself rather than propagating it to the caller. -// This means any errors that occur during the execution of these methods will be caught and logged, not thrown. -// Although it simplifies error handling, this approach may not be recommended because it offers less control to callers. -// Developers are encouraged to use methods that throw errors, to gain finer control over error handling. -extension VideoContext { - /// Starts the recording of a video session. - /// - /// If an error occurs during the operation, the error is logged. - /// - /// - Note: If auto video orientation is enabled, - /// it sets the orientation according to the current device orientation. 
- public func startRecording(errorHandler: ErrorHandler? = nil) { - do { - try startRecordingWithError() - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - } - - /// Stops the current video recording session and attempts to save the video file to the album. - /// - /// Any errors that occur during the process are captured and logged. - /// - /// - Parameter completionHandler: A closure that handles the result of the operation. - /// It's called with a `Result` object that encapsulates either a `VideoFile` instance. - /// - /// - Note: It is recommended to use the ``stopRecording() async throws`` - /// for more straightforward error handling. - public func stopRecording( - _ completionHandler: @escaping (Result) -> Void = { _ in } - ) { - Task(priority: .utility) { - do { - let videoFile = try await self.stopRecordingWithError() - return completionHandler(.success(videoFile)) - } catch let error { - Logger.log(error: error) - return completionHandler(.failure(error)) - } - } - } - - /// Mutes the audio input for the video recording session. - /// - /// If an error occurs during the operation, the error is logged. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult - public func mute(errorHandler: ErrorHandler? = nil) -> VideoContextType { - do { - return try self.muteWithError() - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingVideoContext - } - - /// Unmutes the audio input for the video recording session. - /// - /// If an error occurs during the operation, the error is logged. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult - public func unmute(errorHandler: ErrorHandler? 
= nil) -> VideoContextType { - do { - return try self.unmuteWithError() - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - - return underlyingVideoContext - } - } - - /// Sets the stabilization mode for the video recording session. - /// - /// - Parameter mode: An `AVCaptureVideoStabilizationMode` value - /// indicating the stabilization mode to be set. - /// - /// If an error occurs during the operation, the error is logged. - /// - /// - Returns: `AespaVideoContext`, for chaining calls. - @discardableResult - public func setStabilization( - mode: AVCaptureVideoStabilizationMode, - errorHandler: ErrorHandler? = nil - ) -> VideoContextType { - do { - return try self.setStabilizationWithError(mode: mode) - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingVideoContext - } - - - /// Sets the torch mode and level for the video recording session. - /// - /// If an error occurs during the operation, the error is logged. - /// - /// - Parameters: - /// - mode: The desired torch mode (AVCaptureDevice.TorchMode). - /// - level: The desired torch level as a Float between 0.0 and 1.0. - /// - /// - Returns: Returns self, allowing additional settings to be configured. - /// - /// - Note: This function might throw an error if the torch mode is not supported, - /// or the specified level is not within the acceptable range. - @discardableResult - public func setTorch( - mode: AVCaptureDevice.TorchMode, - level: Float, - errorHandler: ErrorHandler? = nil - ) -> VideoContextType { - do { - return try self.setTorchWithError(mode: mode, level: level) - } catch let error { - errorHandler?(error) - Logger.log(error: error) // Logs any errors encountered during the operation - } - - return underlyingVideoContext - } - - /// Fetches a list of recorded video files. 
- /// The number of files fetched is controlled by the limit parameter. - /// - /// It is recommended not to be called in main thread. - /// - /// - Parameter limit: An integer specifying the maximum number of video files to fetch. - /// If the limit is set to 0 (default), all recorded video files will be fetched. - /// - Returns: An array of `VideoFile` instances. - public func fetchVideoFiles(limit: Int = 0) -> [VideoFile] { - fetchVideoFiles(limit: limit) - } -} - - -public protocol PhotoContext { - associatedtype PhotoContextType: PhotoContext - - var underlyingPhotoContext: PhotoContextType { get } - - /// The publisher that broadcasts the result of a photo file operation. - /// It emits a `Result` object containing a `PhotoFile` on success or an `Error` on failure, - /// and never fails itself. This can be used to observe the photo capturing process and handle - /// the results asynchronously. - var photoFilePublisher: AnyPublisher, Never> { get } - - /// A variable holding current `AVCapturePhotoSettings` - var currentSetting: AVCapturePhotoSettings { get } - - /// Asynchronously captures a photo with the specified `AVCapturePhotoSettings`. - /// - /// The captured photo is flattened into a `Data` object, and then added to an album. A `PhotoFile` - /// object is then created using the raw photo data and the current date. This `PhotoFile` is sent - /// through the `photoFileBufferSubject` and then returned to the caller. - /// - /// If any part of this process fails, an `AespaError` is thrown. - /// - /// - Returns: A `PhotoFile` object representing the captured photo. - /// - Throws: An `AespaError` if there is an issue capturing the photo, - /// flattening it into a `Data` object, or adding it to the album. - @discardableResult func capturePhotoWithError() async throws -> PhotoFile - - /// Sets the flash mode for the camera and returns the updated `AespaPhotoContext` instance. - /// The returned instance can be used for chaining configuration. 
- /// - /// - Parameter mode: The `AVCaptureDevice.FlashMode` to set for the camera. - /// - Returns: The updated `AespaPhotoContext` instance. - @discardableResult func setFlashMode(to mode: AVCaptureDevice.FlashMode) -> PhotoContextType - - /// Sets the red eye reduction mode for the camera and returns the updated `AespaPhotoContext` instance. - /// The returned instance can be used for chaining configuration. - /// - /// - Parameter enabled: A boolean indicating whether the red eye reduction should be enabled or not. - /// - Returns: The updated `AespaPhotoContext` instance. - @discardableResult func redEyeReduction(enabled: Bool) -> PhotoContextType - - /// Updates the photo capturing settings for the `AespaPhotoContext` instance. - /// - /// - Note: This method can be potentially risky to use, as it overrides the existing capture settings. - /// Not all `AVCapturePhotoSettings` are supported, for instance, live photos are not supported. - /// It's recommended to understand the implications of the settings before applying them. - /// - /// - Parameter setting: The `AVCapturePhotoSettings` to use for photo capturing. - func custom(_ setting: AVCapturePhotoSettings) -> PhotoContextType - - // MARK: - Utilities - /// Fetches a list of captured photo files. - /// The number of files fetched is controlled by the limit parameter. - /// - /// It is recommended not to be called in main thread. - /// - /// - Parameter limit: An integer specifying the maximum number of files to fetch. - /// - /// - Returns: An array of `PhotoFile` instances. - func fetchPhotoFiles(limit: Int) -> [PhotoFile] -} - -// MARK: Non-throwing methods -// These methods encapsulate error handling within the method itself rather than propagating it to the caller. -// This means any errors that occur during the execution of these methods will be caught and logged, not thrown. -// Although it simplifies error handling, this approach may not be recommended because it offers less control to callers. 
-// Developers are encouraged to use methods that throw errors, to gain finer control over error handling. -extension PhotoContext { - /// Asynchronously captures a photo using the specified `AVCapturePhotoSettings`. - /// - /// If the photo capture is successful, it will return a `PhotoFile` - /// object through the provided completion handler. - /// - /// In case of an error during the photo capture process, the error will be logged and also returned via - /// the completion handler. - /// - /// - Parameters: - /// - completionHandler: A closure to be invoked once the photo capture process is completed. This - /// closure takes a `Result` type where `Success` contains a `PhotoFile` object and - /// `Failure` contains an `Error` object. By default, the closure does nothing. - /// - public func capturePhoto( - _ completionHandler: @escaping (Result) -> Void = { _ in } - ) { - Task(priority: .utility) { - do { - let photoFile = try await self.capturePhotoWithError() - return completionHandler(.success(photoFile)) - } catch let error { - Logger.log(error: error) - return completionHandler(.failure(error)) - } - } - } - - /// Fetches a list of captured photo files. - /// The number of files fetched is controlled by the limit parameter. - /// - /// It is recommended not to be called in main thread. - /// - /// - Parameter limit: An integer specifying the maximum number of files to fetch. - /// If the limit is set to 0 (default), all recorded video files will be fetched. - /// - Returns: An array of `PhotoFile` instances. 
- public func fetchPhotoFiles(limit: Int = 0) -> [PhotoFile] { - fetchPhotoFiles(limit: limit) - } -} diff --git a/Sources/Aespa/Core/AespaCoreRecorder.swift b/Sources/Aespa/Core/AespaCoreRecorder.swift index bf1c2b9..82391ac 100644 --- a/Sources/Aespa/Core/AespaCoreRecorder.swift +++ b/Sources/Aespa/Core/AespaCoreRecorder.swift @@ -21,23 +21,29 @@ class AespaCoreRecorder: NSObject { self.core = core } - func run(processor: T) throws { + func run(processor: T, _ onComplete: @escaping CompletionHandler) { guard let output = core.movieFileOutput else { - throw AespaError.session(reason: .cannotFindConnection) + onComplete(.failure(AespaError.session(reason: .cannotFindConnection))) + return } - try processor.process(output) + do { + try processor.process(output) + onComplete(.success(())) + } catch { + onComplete(.failure(error)) + } } } extension AespaCoreRecorder { - func startRecording(in filePath: URL) throws { - try run(processor: StartRecordProcessor(filePath: filePath, delegate: self)) + func startRecording(in filePath: URL, _ onComplete: @escaping CompletionHandler) { + run(processor: StartRecordProcessor(filePath: filePath, delegate: self), onComplete) } - + func stopRecording() async throws -> URL { - try run(processor: FinishRecordProcessor()) - + run(processor: FinishRecordProcessor(), { _ in }) + return try await withCheckedThrowingContinuation { continuation in fileIOResultSubsciption = fileIOResultSubject.sink { _ in // Do nothing on completion; we're only interested in values. 
diff --git a/Sources/Aespa/Core/AespaCoreSession.swift b/Sources/Aespa/Core/AespaCoreSession.swift index 44ed487..55d5b90 100644 --- a/Sources/Aespa/Core/AespaCoreSession.swift +++ b/Sources/Aespa/Core/AespaCoreSession.swift @@ -12,46 +12,97 @@ import AVFoundation class AespaCoreSession: AVCaptureSession { var option: AespaOption - + private var workQueue = OperationQueue() + init(option: AespaOption) { self.option = option + + workQueue.qualityOfService = .background + workQueue.maxConcurrentOperationCount = 1 + workQueue.isSuspended = true } - - func run(_ tuner: T) throws { - if tuner.needTransaction { self.beginConfiguration() } - defer { - if tuner.needTransaction { self.commitConfiguration() } + + func run(_ tuner: T, _ onComplete: @escaping CompletionHandler) { + workQueue.addOperation { + do { + if tuner.needTransaction { self.beginConfiguration() } + defer { + if tuner.needTransaction { self.commitConfiguration() } + onComplete(.success(())) + } + + try tuner.tune(self) + } catch let error { + Logger.log(error: error, message: "in \(tuner)") + onComplete(.failure(error)) + } } - - try tuner.tune(self) } - - func run(_ tuner: T) throws { - guard let device = self.videoDeviceInput?.device else { - throw AespaError.device(reason: .invalid) - } - - if tuner.needLock { try device.lockForConfiguration() } - defer { - if tuner.needLock { device.unlockForConfiguration() } + + func run(_ tuner: T, _ onComplete: @escaping CompletionHandler) { + workQueue.addOperation { + do { + guard let device = self.videoDeviceInput?.device else { + throw AespaError.device(reason: .invalid) + } + + if tuner.needLock { try device.lockForConfiguration() } + defer { + if tuner.needLock { device.unlockForConfiguration() } + onComplete(.success(())) + } + + try tuner.tune(device) + } catch let error { + Logger.log(error: error, message: "in \(tuner)") + onComplete(.failure(error)) + } } - - try tuner.tune(device) } - - func run(_ tuner: T) throws { - guard let connection = 
self.connections.first else { - throw AespaError.session(reason: .cannotFindConnection) + + func run(_ tuner: T, _ onComplete: @escaping CompletionHandler) { + workQueue.addOperation { + do { + guard let connection = self.connections.first else { + throw AespaError.session(reason: .cannotFindConnection) + } + + try tuner.tune(connection) + onComplete(.success(())) + } catch let error { + Logger.log(error: error, message: "in \(tuner)") + onComplete(.failure(error)) + } } - - try tuner.tune(connection) } - - func run(_ processor: T) throws { - guard let output = self.movieFileOutput else { - throw AespaError.session(reason: .cannotFindConnection) + + func run(_ processor: T, _ onComplete: @escaping CompletionHandler) { + workQueue.addOperation { + do { + guard let output = self.movieFileOutput else { + throw AespaError.session(reason: .cannotFindConnection) + } + + try processor.process(output) + onComplete(.success(())) + } catch let error { + Logger.log(error: error, message: "in \(processor)") + onComplete(.failure(error)) + } } + } + + func start() throws { + let session = self + + guard session.isRunning == false else { return } - try processor.process(output) + try session.addMovieInput() + try session.addMovieFileOutput() + try session.addCapturePhotoOutput() + session.startRunning() + + self.workQueue.isSuspended = false + Logger.log(message: "Session is configured successfully") } } diff --git a/Sources/Aespa/Context/AespaPhotoContext.swift b/Sources/Aespa/Core/Context/AespaPhotoContext.swift similarity index 84% rename from Sources/Aespa/Context/AespaPhotoContext.swift rename to Sources/Aespa/Core/Context/AespaPhotoContext.swift index 590058d..6b66bd5 100644 --- a/Sources/Aespa/Context/AespaPhotoContext.swift +++ b/Sources/Aespa/Core/Context/AespaPhotoContext.swift @@ -42,8 +42,7 @@ open class AespaPhotoContext { if let firstPhotoFile = fileManager.fetchPhoto( albumName: option.asset.albumName, subDirectoryName: option.asset.photoDirectoryName, - count: 
1).first - { + count: 1).first { photoFileBufferSubject.send(.success(firstPhotoFile)) } } @@ -68,34 +67,22 @@ extension AespaPhotoContext: PhotoContext { photoSetting } - public func capturePhotoWithError() async throws -> PhotoFile { - let setting = AVCapturePhotoSettings(from: photoSetting) - let rawPhotoAsset = try await camera.capture(setting: setting) - - guard let rawPhotoData = rawPhotoAsset.fileDataRepresentation() else { - throw AespaError.file(reason: .unableToFlatten) + public func capturePhoto( + _ completionHandler: @escaping (Result) -> Void + ) { + Task(priority: .utility) { + do { + let photoFile = try await self.capturePhotoWithError() + completionHandler(.success(photoFile)) + } catch let error { + Logger.log(error: error) + completionHandler(.failure(error)) + } } - - let filePath = try FilePathProvider.requestFilePath( - from: fileManager.systemFileManager, - directoryName: option.asset.albumName, - subDirectoryName: option.asset.photoDirectoryName, - fileName: option.asset.fileNameHandler()) - - try fileManager.write(data: rawPhotoData, to: filePath) - try await albumManager.addToAlbum(imageData: rawPhotoData) - - let photoFile = PhotoFileGenerator.generate( - with: filePath, - date: Date()) - - photoFileBufferSubject.send(.success(photoFile)) - - return photoFile } @discardableResult - public func setFlashMode(to mode: AVCaptureDevice.FlashMode) -> AespaPhotoContext { + public func flashMode(to mode: AVCaptureDevice.FlashMode) -> AespaPhotoContext { photoSetting.flashMode = mode return self } @@ -118,3 +105,31 @@ extension AespaPhotoContext: PhotoContext { count: limit) } } + +private extension AespaPhotoContext { + func capturePhotoWithError() async throws -> PhotoFile { + let setting = AVCapturePhotoSettings(from: photoSetting) + let rawPhotoAsset = try await camera.capture(setting: setting) + + guard let rawPhotoData = rawPhotoAsset.fileDataRepresentation() else { + throw AespaError.file(reason: .unableToFlatten) + } + + let filePath = try 
FilePathProvider.requestFilePath( + from: fileManager.systemFileManager, + directoryName: option.asset.albumName, + subDirectoryName: option.asset.photoDirectoryName, + fileName: option.asset.fileNameHandler()) + + try fileManager.write(data: rawPhotoData, to: filePath) + try await albumManager.addToAlbum(imageData: rawPhotoData) + + let photoFile = PhotoFileGenerator.generate( + with: filePath, + date: Date()) + + photoFileBufferSubject.send(.success(photoFile)) + + return photoFile + } +} diff --git a/Sources/Aespa/Context/AespaVideoContext.swift b/Sources/Aespa/Core/Context/AespaVideoContext.swift similarity index 52% rename from Sources/Aespa/Context/AespaVideoContext.swift rename to Sources/Aespa/Core/Context/AespaVideoContext.swift index dfdab58..a705314 100644 --- a/Sources/Aespa/Context/AespaVideoContext.swift +++ b/Sources/Aespa/Core/Context/AespaVideoContext.swift @@ -24,6 +24,7 @@ public class AespaVideoContext { private let videoFileBufferSubject: CurrentValueSubject?, Never> + /// A Boolean value that indicates whether the session is currently recording video. 
public var isRecording: Bool init( @@ -49,8 +50,7 @@ public class AespaVideoContext { if let firstVideoFile = fileManager.fetchVideo( albumName: option.asset.albumName, subDirectoryName: option.asset.videoDirectoryName, - count: 1).first - { + count: 1).first { videoFileBufferSubject.send(.success(firstVideoFile)) } } @@ -60,10 +60,11 @@ extension AespaVideoContext: VideoContext { public var underlyingVideoContext: AespaVideoContext { self } - + public var isMuted: Bool { coreSession.audioDeviceInput == nil } + public var videoFilePublisher: AnyPublisher, Never> { videoFileBufferSubject.handleEvents(receiveOutput: { status in if case .failure(let error) = status { @@ -74,66 +75,94 @@ extension AespaVideoContext: VideoContext { .eraseToAnyPublisher() } - public func startRecordingWithError() throws { - let fileName = option.asset.fileNameHandler() - let filePath = try FilePathProvider.requestFilePath( - from: fileManager.systemFileManager, - directoryName: option.asset.albumName, - subDirectoryName: option.asset.videoDirectoryName, - fileName: fileName, - extension: "mp4") - - if option.session.autoVideoOrientationEnabled { - try commonContext.setOrientationWithError(to: UIDevice.current.orientation.toVideoOrientation) + public func startRecording(_ onComplete: @escaping CompletionHandler = { _ in }) { + do { + let fileName = option.asset.fileNameHandler() + let filePath = try FilePathProvider.requestFilePath( + from: fileManager.systemFileManager, + directoryName: option.asset.albumName, + subDirectoryName: option.asset.videoDirectoryName, + fileName: fileName, + extension: "mp4") + + if option.session.autoVideoOrientationEnabled { + commonContext.orientation(to: UIDevice.current.orientation.toVideoOrientation, onComplete) + } + + recorder.startRecording(in: filePath, onComplete) + isRecording = true + } catch let error { + onComplete(.failure(error)) } - - try recorder.startRecording(in: filePath) } - public func stopRecordingWithError() async throws -> VideoFile 
{ - let videoFilePath = try await recorder.stopRecording() - let videoFile = VideoFileGenerator.generate(with: videoFilePath, date: Date()) + public func stopRecording(_ onCompelte: @escaping ResultHandler = { _ in }) { + Task(priority: .utility) { + do { + let videoFilePath = try await recorder.stopRecording() + let videoFile = VideoFileGenerator.generate(with: videoFilePath, date: Date()) - try await albumManager.addToAlbum(filePath: videoFilePath) - videoFileBufferSubject.send(.success(videoFile)) + try await albumManager.addToAlbum(filePath: videoFilePath) + videoFileBufferSubject.send(.success(videoFile)) - return videoFile + isRecording = false + onCompelte(.success(videoFile)) + } catch let error { + Logger.log(error: error) + onCompelte(.failure(error)) + } + } } @discardableResult - public func muteWithError() throws -> AespaVideoContext { + public func mute(_ onComplete: @escaping CompletionHandler = { _ in }) -> AespaVideoContext { let tuner = AudioTuner(isMuted: true) - try coreSession.run(tuner) + coreSession.run(tuner, onComplete) + return self } @discardableResult - public func unmuteWithError() throws -> AespaVideoContext { + public func unmute(_ onComplete: @escaping CompletionHandler = { _ in }) -> AespaVideoContext { let tuner = AudioTuner(isMuted: false) - try coreSession.run(tuner) + coreSession.run(tuner, onComplete) + return self } @discardableResult - public func setStabilizationWithError(mode: AVCaptureVideoStabilizationMode) throws -> AespaVideoContext { + public func stabilization( + mode: AVCaptureVideoStabilizationMode, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaVideoContext { let tuner = VideoStabilizationTuner(stabilzationMode: mode) - try coreSession.run(tuner) + coreSession.run(tuner, onComplete) + return self } @discardableResult - public func setTorchWithError(mode: AVCaptureDevice.TorchMode, level: Float) throws -> AespaVideoContext { + public func torch( + mode: AVCaptureDevice.TorchMode, + level: Float, 
+ _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaVideoContext { let tuner = TorchTuner(level: level, torchMode: mode) - try coreSession.run(tuner) + coreSession.run(tuner, onComplete) + return self } - - public func customizewWithError(_ tuner: T) throws -> AespaVideoContext { - try coreSession.run(tuner) + + public func customize( + _ tuner: T, + _ onComplete: @escaping CompletionHandler = { _ in } + ) -> AespaVideoContext { + coreSession.run(tuner, onComplete) + return self } - public func fetchVideoFiles(limit: Int) -> [VideoFile] { + public func fetchVideoFiles(limit: Int = 0) -> [VideoFile] { return fileManager.fetchVideo( albumName: option.asset.albumName, subDirectoryName: option.asset.videoDirectoryName, diff --git a/Sources/Aespa/Core/Context/Context.swift b/Sources/Aespa/Core/Context/Context.swift new file mode 100644 index 0000000..ea76544 --- /dev/null +++ b/Sources/Aespa/Core/Context/Context.swift @@ -0,0 +1,313 @@ +// +// File.swift +// +// +// Created by 이영빈 on 2023/06/24. +// + +import UIKit +import Combine +import Foundation +import AVFoundation + +/// A type representing a closure that handles a completion event with potential errors. +public typealias CompletionHandler = (Result) -> Void + +/// A type representing a closure that handles a result of an operation +/// that produces a value of type `T`, with potential errors. +public typealias ResultHandler = (Result) -> Void + +/// A protocol that defines the common behaviors and properties that all context types must implement. +/// +/// It includes methods to control the quality, position, orientation, and auto-focusing behavior +/// of the session. It also includes the ability to adjust the zoom level of the session. +public protocol CommonContext { + /// + associatedtype CommonContextType: CommonContext & VideoContext & PhotoContext + /// + var underlyingCommonContext: CommonContextType { get } + + /// Sets the quality preset for the video recording session. 
+ /// + /// - Parameters: + /// - preset: An `AVCaptureSession.Preset` value indicating the quality preset to be set. + /// - onComplete: A closure to be executed if the session fails to run the tuner. + /// + /// - Returns: `AespaVideoContext`, for chaining calls. + @discardableResult func quality( + to preset: AVCaptureSession.Preset, + _ onComplete: @escaping CompletionHandler + ) -> CommonContextType + + /// Sets the camera position for the video recording session. + /// + /// It refers to `AespaOption.Session.cameraDevicePreference` when choosing the camera device. + /// + /// - Parameters: + /// - position: An `AVCaptureDevice.Position` value indicating the camera position to be set. + /// - onComplete: A closure to be executed if the session fails to run the tuner. + /// + /// - Returns: `AespaVideoContext`, for chaining calls. + @discardableResult func position( + to position: AVCaptureDevice.Position, + _ onComplete: @escaping CompletionHandler + ) -> CommonContextType + + /// Sets the orientation for the session. + /// + /// - Parameters: + /// - orientation: An `AVCaptureVideoOrientation` value indicating the orientation to be set. + /// - onComplete: A closure to be executed if the session fails to run the tuner. + /// + /// - Returns: `AespaVideoContext`, for chaining calls. + /// + /// - Note: It sets the orientation of the video you are recording, + /// not the orientation of the `AVCaptureVideoPreviewLayer`. + @discardableResult func orientation( + to orientation: AVCaptureVideoOrientation, + _ onComplete: @escaping CompletionHandler + ) -> CommonContextType + + /// Sets the autofocusing mode for the video recording session. + /// + /// - Parameters: + /// - mode: The focus mode(`AVCaptureDevice.FocusMode`) for the session. + /// - point: The point in the camera's field of view that the auto focus should prioritize. + /// - onComplete: A closure to be executed if the session fails to run the tuner. 
+ /// + /// - Returns: `AespaVideoContext`, for chaining calls. + @discardableResult func focus( + mode: AVCaptureDevice.FocusMode, + point: CGPoint?, + _ onComplete: @escaping CompletionHandler + ) -> CommonContextType + + /// Sets the zoom factor for the video recording session. + /// + /// - Parameters: + /// - factor: A `CGFloat` value indicating the zoom factor to be set. + /// - onComplete: A closure to be executed if the session fails to run the tuner. + /// + /// - Returns: `AespaVideoContext`, for chaining calls. + @discardableResult func zoom(factor: CGFloat, _ onComplete: @escaping CompletionHandler) -> CommonContextType + + /// Changes monitoring status. + /// + /// - Parameters: + /// - enabled: A boolean value to set monitoring status. + /// - onComplete: A closure to be executed if the session fails to run the tuner. + /// + /// - Returns: `AespaVideoContext`, for chaining calls. + @discardableResult func changeMonitoring( + enabled: Bool, + _ onComplete: @escaping CompletionHandler + ) -> CommonContextType + + /// This function provides a way to use a custom tuner to modify the current session. + /// The tuner must conform to `AespaSessionTuning`. + /// + /// - Parameters: + /// - tuner: An instance that conforms to `AespaSessionTuning`. + /// - onComplete: A closure to be executed if the session fails to run the tuner. + /// + /// - Returns: `AespaVideoContext`, for chaining calls. + @discardableResult func custom( + _ tuner: T, + _ onComplete: @escaping CompletionHandler + ) -> CommonContextType +} + +/// A protocol that defines the behaviors and properties specific to the video context. +/// +/// It adds video-specific capabilities such as checking if +/// the session is currently recording or muted, and controlling video recording, +/// stabilization, torch mode, and fetching recorded video files. 
+public protocol VideoContext { + /// + associatedtype VideoContextType: VideoContext + /// + var underlyingVideoContext: VideoContextType { get } + + /// A Boolean value that indicates whether the session is currently recording video. + var isRecording: Bool { get } + + /// This publisher is responsible for emitting `VideoFile` objects resulting from completed recordings. + /// + /// In the case of an error, it logs the error before forwarding it wrapped in a `Result.failure`. + /// If you don't want to show logs, set `enableLogging` to `false` from `AespaOption.Log` + /// + /// - Returns: `VideoFile` wrapped in a `Result` type. + var videoFilePublisher: AnyPublisher, Never> { get } + + /// This property reflects the current state of audio input. + /// + /// If it returns `true`, the audio input is currently muted. + var isMuted: Bool { get } + + /// Starts the video recording session. + /// + /// - Parameter onComplete: A closure to handle any errors that occur during recording. + /// + /// - Note: If `autoVideoOrientation` option is enabled, + /// it sets the orientation according to the current device orientation. + func startRecording(_ onComplete: @escaping CompletionHandler) + + /// Stops the current recording session and saves the video file. + /// + /// Once the recording session is successfully stopped and the video file is saved, + /// this function invokes a completion handler with the resulting `VideoFile` instance or an error. + /// + /// - Parameter onComplete: A closure to be called after the recording has stopped + /// and the video file is saved or failed. + func stopRecording(_ onComplete: @escaping (Result) -> Void) + + /// Mutes the audio input for the video recording session. + /// + /// - Parameter onComplete: A closure to handle any errors that occur when muting the audio. + /// + /// - Returns: The modified `VideoContextType` for chaining calls. 
+ @discardableResult + func mute(_ onComplete: @escaping CompletionHandler) -> VideoContextType + + /// Unmutes the audio input for the video recording session. + /// + /// - Parameter onComplete: A closure to handle any errors that occur when unmuting the audio. + /// + /// - Returns: The modified `VideoContextType` for chaining calls. + @discardableResult + func unmute(_ onComplete: @escaping CompletionHandler) -> VideoContextType + + /// Sets the stabilization mode for the video recording session. + /// + /// - Parameters: + /// - mode: An `AVCaptureVideoStabilizationMode` value indicating the stabilization mode to be set. + /// - onComplete: A closure to handle any errors that occur when setting the stabilization mode. + /// + /// - Returns: The modified `VideoContextType` for chaining calls. + @discardableResult + func stabilization( + mode: AVCaptureVideoStabilizationMode, + _ onComplete: @escaping CompletionHandler + ) -> VideoContextType + + /// Sets the torch mode and level for the video recording session. + /// + /// - Parameters: + /// - mode: The desired torch mode (AVCaptureDevice.TorchMode). + /// - level: The desired torch level as a Float between 0.0 and 1.0. + /// + /// - Returns: Returns self, allowing additional settings to be configured. + /// + /// - Note: This function might throw an error if the torch mode is not supported, + /// or the specified level is not within the acceptable range. + @discardableResult + func torch( + mode: AVCaptureDevice.TorchMode, + level: Float, + _ onComplete: @escaping CompletionHandler + ) -> VideoContextType + + /// Fetches a list of recorded video files. + /// The number of files fetched is controlled by the limit parameter. + /// + /// It is recommended not to be called in the main thread. + /// + /// - Parameters: + /// - limit: An integer specifying the maximum number of video files to fetch. + /// Fetch all files if `limit` is zero(`0`) + /// + /// - Returns: An array of `VideoFile` instances. 
+ func fetchVideoFiles(limit: Int) -> [VideoFile] +} + +/// A protocol that defines the behaviors and properties specific to the photo context. +/// +/// It adds photo-specific capabilities such as accessing +/// current photo settings, controlling flash mode, and red-eye reduction, capturing +/// photo, and fetching captured photo files. +public protocol PhotoContext { + /// + associatedtype PhotoContextType: PhotoContext + /// + var underlyingPhotoContext: PhotoContextType { get } + + /// The publisher that broadcasts the result of a photo file operation. + /// It emits a `Result` object containing a `PhotoFile` on success or an `Error` on failure, + /// and never fails itself. This can be used to observe the photo capturing process and handle + /// the results asynchronously. + var photoFilePublisher: AnyPublisher, Never> { get } + + /// A variable holding current `AVCapturePhotoSettings` + var currentSetting: AVCapturePhotoSettings { get } + + /// Asynchronously captures a photo using the specified `AVCapturePhotoSettings`. + /// + /// If the photo capture is successful, it will return a `PhotoFile` + /// object through the provided completion handler. + /// + /// In case of an error during the photo capture process, the error will be logged and also returned via + /// the completion handler. + /// + /// - Parameters: + /// - completionHandler: A closure to be invoked once the photo capture process is completed. This + /// closure takes a `Result` type where `Success` contains a `PhotoFile` object and + /// `Failure` contains an `Error` object. By default, the closure does nothing. + /// + func capturePhoto( + _ completionHandler: @escaping (Result) -> Void + ) + + /// Sets the flash mode for the camera and returns the updated `AespaPhotoContext` instance. + /// The returned instance can be used for chaining configuration. + /// + /// - Parameter mode: The `AVCaptureDevice.FlashMode` to set for the camera. 
+ /// - Returns: The updated `AespaPhotoContext` instance. + @discardableResult func flashMode(to mode: AVCaptureDevice.FlashMode) -> PhotoContextType + + /// Sets the red eye reduction mode for the camera and returns the updated `AespaPhotoContext` instance. + /// The returned instance can be used for chaining configuration. + /// + /// - Parameter enabled: A boolean indicating whether the red eye reduction should be enabled or not. + /// - Returns: The updated `AespaPhotoContext` instance. + @discardableResult func redEyeReduction(enabled: Bool) -> PhotoContextType + + /// Updates the photo capturing settings for the `AespaPhotoContext` instance. + /// + /// - Note: This method can be potentially risky to use, as it overrides the existing capture settings. + /// Not all `AVCapturePhotoSettings` are supported, for instance, live photos are not supported. + /// It's recommended to understand the implications of the settings before applying them. + /// + /// - Parameter setting: The `AVCapturePhotoSettings` to use for photo capturing. + func custom(_ setting: AVCapturePhotoSettings) -> PhotoContextType + + // MARK: - Utilities + /// Fetches a list of captured photo files. + /// The number of files fetched is controlled by the limit parameter. + /// + /// It is recommended not to be called in main thread. + /// + /// - Parameter limit: An integer specifying the maximum number of files to fetch. + /// + /// - Returns: An array of `PhotoFile` instances. + func fetchPhotoFiles(limit: Int) -> [PhotoFile] +} + +// MARK: Non-throwing methods +// These methods encapsulate error handling within the method itself rather than propagating it to the caller. +// This means any errors that occur during the execution of these methods will be caught and logged, not thrown. +// Although it simplifies error handling, this approach may not be recommended because +// it offers less control to callers. 
+// Developers are encouraged to use methods that throw errors, to gain finer control over error handling. +extension PhotoContext { + /// Fetches a list of captured photo files. + /// The number of files fetched is controlled by the limit parameter. + /// + /// It is recommended not to be called in main thread. + /// + /// - Parameter limit: An integer specifying the maximum number of files to fetch. + /// If the limit is set to 0 (default), all recorded video files will be fetched. + /// - Returns: An array of `PhotoFile` instances. + public func fetchPhotoFiles(limit: Int = 0) -> [PhotoFile] { + fetchPhotoFiles(limit: limit) + } +} diff --git a/Sources/Aespa/Core/Representable/AVCaptureConnection+AespaRepresentable.swift b/Sources/Aespa/Core/Representable/AVCaptureConnection+AespaRepresentable.swift index 7b04560..3eedc79 100644 --- a/Sources/Aespa/Core/Representable/AVCaptureConnection+AespaRepresentable.swift +++ b/Sources/Aespa/Core/Representable/AVCaptureConnection+AespaRepresentable.swift @@ -12,16 +12,16 @@ protocol AespaCaptureConnectionRepresentable { var videoOrientation: AVCaptureVideoOrientation { get set } var preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode { get set } - func setOrientation(to orientation: AVCaptureVideoOrientation) - func setStabilizationMode(to mode: AVCaptureVideoStabilizationMode) + func orientation(to orientation: AVCaptureVideoOrientation) + func stabilizationMode(to mode: AVCaptureVideoStabilizationMode) } extension AVCaptureConnection: AespaCaptureConnectionRepresentable { - func setOrientation(to orientation: AVCaptureVideoOrientation) { + func orientation(to orientation: AVCaptureVideoOrientation) { self.videoOrientation = orientation } - func setStabilizationMode(to mode: AVCaptureVideoStabilizationMode) { + func stabilizationMode(to mode: AVCaptureVideoStabilizationMode) { self.preferredVideoStabilizationMode = mode } } diff --git a/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift 
b/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift index 8cc66e9..cb7cccf 100644 --- a/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift +++ b/Sources/Aespa/Core/Representable/AVCaptureDevice+AespaRepresentable.swift @@ -11,21 +11,23 @@ import AVFoundation protocol AespaCaptureDeviceRepresentable { var hasTorch: Bool { get } var focusMode: AVCaptureDevice.FocusMode { get set } + var isSubjectAreaChangeMonitoringEnabled: Bool { get set } var flashMode: AVCaptureDevice.FlashMode { get set } var videoZoomFactor: CGFloat { get set } var maxResolution: Double? { get } - + func isFocusModeSupported(_ focusMode: AVCaptureDevice.FocusMode) -> Bool - func setZoomFactor(_ factor: CGFloat) - func setFocusMode(_ focusMode: AVCaptureDevice.FocusMode) - func setTorchMode(_ torchMode: AVCaptureDevice.TorchMode) + func zoomFactor(_ factor: CGFloat) + func setFocusMode(_ focusMode: AVCaptureDevice.FocusMode, point: CGPoint?) throws + func torchMode(_ torchMode: AVCaptureDevice.TorchMode) + func enableMonitoring(_ enabled: Bool) func setTorchModeOn(level torchLevel: Float) throws } extension AVCaptureDevice: AespaCaptureDeviceRepresentable { - func setTorchMode(_ torchMode: TorchMode) { + func torchMode(_ torchMode: TorchMode) { switch torchMode { case .off: self.torchMode = .off @@ -37,12 +39,30 @@ extension AVCaptureDevice: AespaCaptureDeviceRepresentable { self.torchMode = .off } } + + func enableMonitoring(_ enabled: Bool) { + self.isSubjectAreaChangeMonitoringEnabled = enabled + } - func setFocusMode(_ focusMode: FocusMode) { - self.focusMode = focusMode + func setFocusMode(_ focusMode: AVCaptureDevice.FocusMode, point: CGPoint?) 
throws { + if isAdjustingFocus { + throw AespaError.device(reason: .busy) + } + + if isFocusModeSupported(focusMode) { + self.focusMode = focusMode + } else { + throw AespaError.device(reason: .notSupported) + } + + if isFocusPointOfInterestSupported { + if let point { self.focusPointOfInterest = point } + } else { + throw AespaError.device(reason: .notSupported) + } } - func setZoomFactor(_ factor: CGFloat) { + func zoomFactor(_ factor: CGFloat) { self.videoZoomFactor = factor } diff --git a/Sources/Aespa/Core/Representable/AespaCoreSession+AespaRepresentable.swift b/Sources/Aespa/Core/Representable/AespaCoreSession+AespaRepresentable.swift index dddf3b3..98b199b 100644 --- a/Sources/Aespa/Core/Representable/AespaCoreSession+AespaRepresentable.swift +++ b/Sources/Aespa/Core/Representable/AespaCoreSession+AespaRepresentable.swift @@ -60,13 +60,13 @@ public protocol AespaCoreSessionRepresentable { /// Sets the position of the camera. /// Throws an error if the operation fails. - func setCameraPosition( + func cameraPosition( to position: AVCaptureDevice.Position, device deviceType: AVCaptureDevice.DeviceType? ) throws /// Sets the video quality preset. - func setVideoQuality(to preset: AVCaptureSession.Preset) throws + func videoQuality(to preset: AVCaptureSession.Preset) throws } extension AespaCoreSession: AespaCoreSessionRepresentable { @@ -187,7 +187,7 @@ extension AespaCoreSession: AespaCoreSessionRepresentable { } // MARK: - Option related - func setCameraPosition( + func cameraPosition( to position: AVCaptureDevice.Position, device deviceType: AVCaptureDevice.DeviceType? 
) throws { @@ -217,7 +217,7 @@ extension AespaCoreSession: AespaCoreSessionRepresentable { } } - func setVideoQuality(to preset: AVCaptureSession.Preset) { + func videoQuality(to preset: AVCaptureSession.Preset) { let session = self session.sessionPreset = preset diff --git a/Sources/Aespa/Processor/Capture/CapturePhotoProcessor.swift b/Sources/Aespa/Processor/Capture/CapturePhotoProcessor.swift index 78f6b47..26f079b 100644 --- a/Sources/Aespa/Processor/Capture/CapturePhotoProcessor.swift +++ b/Sources/Aespa/Processor/Capture/CapturePhotoProcessor.swift @@ -11,11 +11,6 @@ struct CapturePhotoProcessor: AespaCapturePhotoOutputProcessing { let setting: AVCapturePhotoSettings let delegate: AVCapturePhotoCaptureDelegate - init(setting: AVCapturePhotoSettings, delegate: AVCapturePhotoCaptureDelegate) { - self.setting = setting - self.delegate = delegate - } - func process(_ output: T) throws where T: AespaPhotoOutputRepresentable { guard output.getConnection(with: .video) != nil else { throw AespaError.session(reason: .cannotFindConnection) diff --git a/Sources/Aespa/Tuner/Connection/VideoOrientationTuner.swift b/Sources/Aespa/Tuner/Connection/VideoOrientationTuner.swift index e64c697..f38e25a 100644 --- a/Sources/Aespa/Tuner/Connection/VideoOrientationTuner.swift +++ b/Sources/Aespa/Tuner/Connection/VideoOrientationTuner.swift @@ -11,6 +11,6 @@ struct VideoOrientationTuner: AespaConnectionTuning { var orientation: AVCaptureVideoOrientation func tune(_ connection: T) throws { - connection.setOrientation(to: orientation) + connection.orientation(to: orientation) } } diff --git a/Sources/Aespa/Tuner/Connection/VideoStabilizationTuner.swift b/Sources/Aespa/Tuner/Connection/VideoStabilizationTuner.swift index ad555b2..5877f78 100644 --- a/Sources/Aespa/Tuner/Connection/VideoStabilizationTuner.swift +++ b/Sources/Aespa/Tuner/Connection/VideoStabilizationTuner.swift @@ -11,6 +11,6 @@ struct VideoStabilizationTuner: AespaConnectionTuning { var stabilzationMode: 
AVCaptureVideoStabilizationMode func tune(_ connection: T) { - connection.setStabilizationMode(to: stabilzationMode) + connection.stabilizationMode(to: stabilzationMode) } } diff --git a/Sources/Aespa/Tuner/Device/ChangeMonitoringTuner.swift b/Sources/Aespa/Tuner/Device/ChangeMonitoringTuner.swift new file mode 100644 index 0000000..6ecee84 --- /dev/null +++ b/Sources/Aespa/Tuner/Device/ChangeMonitoringTuner.swift @@ -0,0 +1,23 @@ +// +// ChangeMonitoringTuner.swift +// +// +// Created by 이영빈 on 2023/06/28. +// + +import Foundation +import AVFoundation + +struct ChangeMonitoringTuner: AespaDeviceTuning { + let needLock = true + + let enabled: Bool + + init(isSubjectAreaChangeMonitoringEnabled: Bool) { + self.enabled = isSubjectAreaChangeMonitoringEnabled + } + + func tune(_ device: T) throws { + device.enableMonitoring(enabled) + } +} diff --git a/Sources/Aespa/Tuner/Device/AutoFocusTuner.swift b/Sources/Aespa/Tuner/Device/FocusTuner.swift similarity index 58% rename from Sources/Aespa/Tuner/Device/AutoFocusTuner.swift rename to Sources/Aespa/Tuner/Device/FocusTuner.swift index 2b04f43..67677f4 100644 --- a/Sources/Aespa/Tuner/Device/AutoFocusTuner.swift +++ b/Sources/Aespa/Tuner/Device/FocusTuner.swift @@ -1,22 +1,25 @@ // -// AutoFocusTuner.swift +// FocusTuner.swift // // // Created by Young Bin on 2023/06/10. // +import UIKit import Foundation import AVFoundation -struct AutoFocusTuner: AespaDeviceTuning { +struct FocusTuner: AespaDeviceTuning { let needLock = true + let mode: AVCaptureDevice.FocusMode + let point: CGPoint? 
func tune(_ device: T) throws { guard device.isFocusModeSupported(mode) else { - throw AespaError.device(reason: .unsupported) + throw AespaError.device(reason: .notSupported) } - device.setFocusMode(mode) + try device.setFocusMode(mode, point: point) } } diff --git a/Sources/Aespa/Tuner/Device/TorchTuner.swift b/Sources/Aespa/Tuner/Device/TorchTuner.swift index 7a2cac5..4df7668 100644 --- a/Sources/Aespa/Tuner/Device/TorchTuner.swift +++ b/Sources/Aespa/Tuner/Device/TorchTuner.swift @@ -14,10 +14,10 @@ struct TorchTuner: AespaDeviceTuning { func tune(_ device: T) throws where T: AespaCaptureDeviceRepresentable { guard device.hasTorch else { - throw AespaError.device(reason: .unsupported) + throw AespaError.device(reason: .notSupported) } - device.setTorchMode(torchMode) + device.torchMode(torchMode) try device.setTorchModeOn(level: level) } } diff --git a/Sources/Aespa/Tuner/Device/ZoomTuner.swift b/Sources/Aespa/Tuner/Device/ZoomTuner.swift index 806c898..3177db2 100644 --- a/Sources/Aespa/Tuner/Device/ZoomTuner.swift +++ b/Sources/Aespa/Tuner/Device/ZoomTuner.swift @@ -12,6 +12,6 @@ struct ZoomTuner: AespaDeviceTuning { var zoomFactor: CGFloat func tune(_ device: T) { - device.setZoomFactor(zoomFactor) + device.zoomFactor(zoomFactor) } } diff --git a/Sources/Aespa/Tuner/Session/CameraPositionTuner.swift b/Sources/Aespa/Tuner/Session/CameraPositionTuner.swift index fd0da89..23237fc 100644 --- a/Sources/Aespa/Tuner/Session/CameraPositionTuner.swift +++ b/Sources/Aespa/Tuner/Session/CameraPositionTuner.swift @@ -18,6 +18,6 @@ struct CameraPositionTuner: AespaSessionTuning { } func tune(_ session: T) throws { - try session.setCameraPosition(to: position, device: devicePreference) + try session.cameraPosition(to: position, device: devicePreference) } } diff --git a/Sources/Aespa/Tuner/Session/QualityTuner.swift b/Sources/Aespa/Tuner/Session/QualityTuner.swift index dc85bc3..ad9622d 100644 --- a/Sources/Aespa/Tuner/Session/QualityTuner.swift +++ 
b/Sources/Aespa/Tuner/Session/QualityTuner.swift @@ -12,6 +12,6 @@ struct QualityTuner: AespaSessionTuning { var videoQuality: AVCaptureSession.Preset func tune(_ session: T) throws { - try session.setVideoQuality(to: self.videoQuality) + try session.videoQuality(to: self.videoQuality) } } diff --git a/Sources/Aespa/Tuner/Session/SessionLaunchTuner.swift b/Sources/Aespa/Tuner/Session/SessionLaunchTuner.swift deleted file mode 100644 index 7fc5934..0000000 --- a/Sources/Aespa/Tuner/Session/SessionLaunchTuner.swift +++ /dev/null @@ -1,21 +0,0 @@ -// -// SessionLauncher.swift -// -// -// Created by 이영빈 on 2023/06/02. -// - -import AVFoundation - -struct SessionLaunchTuner: AespaSessionTuning { - let needTransaction = false - - func tune(_ session: T) throws { - guard session.isRunning == false else { return } - - try session.addMovieInput() - try session.addMovieFileOutput() - try session.addCapturePhotoOutput() - session.startRunning() - } -} diff --git a/Sources/Aespa/Tuner/Session/SessionTerminationTuner.swift b/Sources/Aespa/Tuner/Session/SessionTerminationTuner.swift index bfdf531..8d5400e 100644 --- a/Sources/Aespa/Tuner/Session/SessionTerminationTuner.swift +++ b/Sources/Aespa/Tuner/Session/SessionTerminationTuner.swift @@ -16,5 +16,7 @@ struct SessionTerminationTuner: AespaSessionTuning { session.removeAudioInput() session.removeMovieInput() session.stopRunning() + + Logger.log(message: "Session is terminated successfully") } } diff --git a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift index ae8309b..366e878 100644 --- a/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift +++ b/Sources/Aespa/Util/Extension/SwiftUI+Extension.swift @@ -10,44 +10,33 @@ import SwiftUI import AVFoundation public extension AespaSession { - /// A `SwiftUI` `View` that you use to display video as it is being captured by an input device. + /// This function is used to create a preview of the session. 
Doesn't offer any functionalities. + /// It returns a SwiftUI `View` that displays video as it is being captured. /// - /// - Parameter gravity: Define `AVLayerVideoGravity` for preview's orientation. - /// .resizeAspectFill` by default. + /// - Parameter gravity: Defines how the video is displayed within the layer bounds. + /// .resizeAspectFill` by default, which scales the video to fill the layer bounds. /// - /// - Returns: `some UIViewRepresentable` which can coordinate other `View` components - func preview(gravity: AVLayerVideoGravity = .resizeAspectFill) -> some UIViewControllerRepresentable { - Preview(of: previewLayer, gravity: gravity) + /// - Returns: A SwiftUI `View` that displays the video feed. + func preview(gravity: AVLayerVideoGravity = .resizeAspectFill) -> some View { + return Preview(of: self, gravity: gravity) } -} - -private struct Preview: UIViewControllerRepresentable { - let previewLayer: AVCaptureVideoPreviewLayer - let gravity: AVLayerVideoGravity - - init( - of previewLayer: AVCaptureVideoPreviewLayer, - gravity: AVLayerVideoGravity - ) { - self.gravity = gravity - self.previewLayer = previewLayer - } - - func makeUIViewController(context: Context) -> UIViewController { - let viewController = UIViewController() - viewController.view.backgroundColor = .clear - - return viewController - } - - func updateUIViewController(_ uiViewController: UIViewController, context: Context) { - previewLayer.videoGravity = gravity - uiViewController.view.layer.addSublayer(previewLayer) - - previewLayer.frame = uiViewController.view.bounds - } - - func dismantleUIViewController(_ uiViewController: UIViewController, coordinator: ()) { - previewLayer.removeFromSuperlayer() + + /// This function is used to create an interactive preview of the session. + /// It returns a SwiftUI `View` that not only displays video as it is being captured, + /// but also allows user interaction like tap-to-focus, pinch zoom and double tap position change. 
+ /// + /// - Parameter gravity: Defines how the video is displayed within the layer bounds. + /// .resizeAspectFill` by default, which scales the video to fill the layer bounds. + /// + /// - Returns: A SwiftUI `View` that displays the video feed and allows user interaction. + /// + /// - Warning: Tap-to-focus works only in `autoFocus` mode. + /// Make sure you're using this mode for the feature to work. + func interactivePreview( + gravity: AVLayerVideoGravity = .resizeAspectFill, + option: InteractivePreviewOption = .init() + ) -> InteractivePreview { + let internalPreview = Preview(of: self, gravity: gravity) + return InteractivePreview(internalPreview) } } diff --git a/Sources/Aespa/Util/Log/Logger.swift b/Sources/Aespa/Util/Log/Logger.swift index 1321fc3..2251eff 100644 --- a/Sources/Aespa/Util/Log/Logger.swift +++ b/Sources/Aespa/Util/Log/Logger.swift @@ -18,11 +18,18 @@ class Logger { static func log( error: Error, - file: String = (#file as NSString).lastPathComponent, + message: String = "", method: String = #function ) { if enableLogging { - print("[Aespa : error] [\(file) : \(method)] - \(error) : \(error.localizedDescription)") + let timestamp = Date().description + print( + "[⚠️ Aespa Error] \(timestamp) |" + + " Method: \(method) |" + + " Error: \(error) |" + + " Description: \(error.localizedDescription) |" + + " Message: \(message)" + ) } } } diff --git a/Sources/Aespa/View/InteractivePreview.swift b/Sources/Aespa/View/InteractivePreview.swift new file mode 100644 index 0000000..d1c8401 --- /dev/null +++ b/Sources/Aespa/View/InteractivePreview.swift @@ -0,0 +1,208 @@ +// +// InteractivePreview.swift +// +// +// Created by Young Bin on 2023/06/30. +// + +import Combine +import SwiftUI +import AVFoundation + +/// Struct that contains the options for customizing an `InteractivePreview`. 
+/// +/// The options include enabling or disabling certain interactive features such as changing position, +/// zooming, focusing, adjusting focus mode when moved, and showing a crosshair. +public struct InteractivePreviewOption { + /// Flag that controls whether the camera position can be changed. Default is `true`. + public var enableChangePosition = true + + /// Flag that controls whether zoom functionality is enabled. Default is `true`. + public var enableZoom = true + + /// Flag that controls whether focus can be manually adjusted. Default is `true`. + public var enableFocus = true + + /// Flag that controls whether the focus mode is changed when the camera is moved. Default is `true`. + public var enableChangeFocusModeWhenMoved = true + + /// Flag that controls whether a crosshair is displayed on the preview. Default is `true`. + public var enableShowingCrosshair = true + + /// Initialize the option + public init( + enableChangePosition: Bool = true, + enableZoom: Bool = true, + enableFocus: Bool = true, + enableChangeFocusModeWhenMoved: Bool = true, + enableShowingCrosshair: Bool = true + ) { + self.enableChangePosition = enableChangePosition + self.enableZoom = enableZoom + self.enableFocus = enableFocus + self.enableChangeFocusModeWhenMoved = enableChangeFocusModeWhenMoved + self.enableShowingCrosshair = enableShowingCrosshair + } +} + +public struct InteractivePreview: View { + private let option: InteractivePreviewOption + private let preview: Preview + + // Zoom + @State private var previousZoomFactor: CGFloat = 1.0 + @State private var currentZoomFactor: CGFloat = 1.0 + + // Foocus + @State private var preferredFocusMode: AVCaptureDevice.FocusMode = .continuousAutoFocus + @State private var focusingLocation = CGPoint.zero + + // Crosshair + @State private var focusFrameOpacity: Double = 0 + @State private var showingCrosshairTask: Task? + + private var subjectAreaChangeMonitoringSubscription: Cancellable? 
+ + init(_ preview: Preview, option: InteractivePreviewOption = .init()) { + self.preview = preview + self.option = option + self.preferredFocusMode = preview.session.currentFocusMode ?? .continuousAutoFocus + + self.subjectAreaChangeMonitoringSubscription = preview + .session + .getSubjectAreaDidChangePublisher() + .sink(receiveValue: { [self] _ in + if option.enableChangeFocusModeWhenMoved { + self.resetFocusMode() + } + }) + + } + + var session: AespaSession { + preview.session + } + + var layer: AVCaptureVideoPreviewLayer { + preview.previewLayer + } + + var currentFocusMode: AVCaptureDevice.FocusMode? { + session.currentFocusMode + } + + var currentCameraPosition: AVCaptureDevice.Position? { + session.currentCameraPosition + } + + public var body: some View { + GeometryReader { geometry in + ZStack { + preview + .gesture(changePositionGesture) + .gesture(tapToFocusGesture(geometry)) + .gesture(pinchZoomGesture) + + // Crosshair + Rectangle() + .stroke(lineWidth: 1) + .foregroundColor(Color.yellow) + .frame(width: 100, height: 100) + .position(focusingLocation) + .opacity(focusFrameOpacity) + .animation(.spring(), value: focusFrameOpacity) + } + } + } +} + +private extension InteractivePreview { + var changePositionGesture: some Gesture { + guard session.isRunning, option.enableChangePosition else { + return TapGesture(count: 2).onEnded {} + } + + return TapGesture(count: 2).onEnded { + let nextPosition: AVCaptureDevice.Position = (currentCameraPosition == .back) ? 
.front : .back + session.position(to: nextPosition) + } + } + + func tapToFocusGesture(_ geometry: GeometryProxy) -> some Gesture { + guard session.isRunning, option.enableFocus else { + return DragGesture(minimumDistance: 0).onEnded { _ in } + } + + return DragGesture(minimumDistance: 0) + .onEnded { value in + guard + let currentFocusMode, + currentFocusMode == .locked || currentFocusMode == .continuousAutoFocus + else { + return + } + + var point = value.location + point = CGPoint( + x: point.x / geometry.size.width, + y: point.y / geometry.size.height + ) + print(point) + + session.focus(mode: .autoFocus, point: point) { _ in + print("Done") + } + focusingLocation = value.location + + if option.enableShowingCrosshair { + showCrosshair() + } + } + } + + var pinchZoomGesture: some Gesture { + guard session.isRunning, option.enableZoom else { + return MagnificationGesture().onChanged { _ in } .onEnded { _ in } + } + + let maxZoomFactor = session.maxZoomFactor ?? 1.0 + return MagnificationGesture() + .onChanged { (scale) in + let videoZoomFactor = scale * previousZoomFactor + if (videoZoomFactor <= maxZoomFactor) { + let newZoomFactor = max(1.0, min(videoZoomFactor, maxZoomFactor)) + session.zoom(factor: newZoomFactor) + } + } + .onEnded { (scale) in + let videoZoomFactor = scale * previousZoomFactor + previousZoomFactor = videoZoomFactor >= 1 ? 
videoZoomFactor : 1 + } + } + + func resetFocusMode() { + guard session.isRunning else { return } + session.focus(mode: preferredFocusMode) + } + + func showCrosshair() { + print(option.enableShowingCrosshair) + guard option.enableShowingCrosshair else { return } + + // Cancel the previous task + showingCrosshairTask?.cancel() + // Running a new task + showingCrosshairTask = Task { + // 10^9 nano seconds = 1 second + let second: UInt64 = 1_000_000_000 + + withAnimation { focusFrameOpacity = 1 } + + try await Task.sleep(nanoseconds: 2 * second) + withAnimation { focusFrameOpacity = 0.35 } + + try await Task.sleep(nanoseconds: 3 * second) + withAnimation { focusFrameOpacity = 0 } + } + } +} diff --git a/Sources/Aespa/View/Preview.swift b/Sources/Aespa/View/Preview.swift new file mode 100644 index 0000000..375d98f --- /dev/null +++ b/Sources/Aespa/View/Preview.swift @@ -0,0 +1,43 @@ +// +// File.swift +// +// +// Created by Young Bin on 2023/06/30. +// + +import SwiftUI +import Foundation +import AVFoundation + +struct Preview: UIViewControllerRepresentable { + let session: AespaSession + let gravity: AVLayerVideoGravity + let previewLayer: AVCaptureVideoPreviewLayer + + init( + of session: AespaSession, + gravity: AVLayerVideoGravity + ) { + self.gravity = gravity + self.session = session + self.previewLayer = session.previewLayer + } + + func makeUIViewController(context: Context) -> UIViewController { + let viewController = UIViewController() + viewController.view.backgroundColor = .clear + + return viewController + } + + func updateUIViewController(_ uiViewController: UIViewController, context: Context) { + previewLayer.videoGravity = gravity + uiViewController.view.layer.addSublayer(previewLayer) + + previewLayer.frame = uiViewController.view.bounds + } + + func dismantleUIViewController(_ uiViewController: UIViewController, coordinator: ()) { + previewLayer.removeFromSuperlayer() + } +} diff --git a/Tests/TestHostApp.xcodeproj/project.pbxproj 
b/Tests/TestHostApp.xcodeproj/project.pbxproj index 5d4a45f..99f9dd1 100644 --- a/Tests/TestHostApp.xcodeproj/project.pbxproj +++ b/Tests/TestHostApp.xcodeproj/project.pbxproj @@ -61,7 +61,7 @@ 9C727D0B2A3FEF9800EF9472 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 9C727D0E2A3FEF9800EF9472 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; 9C727D142A3FEF9900EF9472 /* TestHostAppTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = TestHostAppTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; - 9C727D542A3FF09400EF9472 /* Aespa */ = {isa = PBXFileReference; lastKnownFileType = wrapper; name = Aespa; path = ..; sourceTree = ""; }; + 9CA8C9BC2A4AE41300548463 /* Aespa */ = {isa = PBXFileReference; lastKnownFileType = wrapper; name = Aespa; path = ..; sourceTree = ""; }; 9CD12FF92A452FA10012D1E1 /* URLCacheStorageTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = URLCacheStorageTests.swift; sourceTree = ""; }; 9CD12FFB2A454AC40012D1E1 /* GeneratorTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GeneratorTests.swift; sourceTree = ""; }; 9CD12FFD2A454B770012D1E1 /* MockImage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockImage.swift; sourceTree = ""; }; @@ -153,8 +153,8 @@ 9C727CFB2A3FEF9600EF9472 = { isa = PBXGroup; children = ( + 9CA8C9BB2A4AE41300548463 /* Packages */, 9C4BBE4A2A3FF4870071C84F /* Test.xctestplan */, - 9C727D532A3FF09400EF9472 /* Packages */, 9C727D062A3FEF9600EF9472 /* TestHostApp */, 9C4BBE4B2A400E450071C84F /* Tests */, 9C727D052A3FEF9600EF9472 /* Products */, @@ -190,19 +190,19 @@ path = "Preview Content"; sourceTree = ""; }; - 9C727D532A3FF09400EF9472 /* Packages */ = { + 9C727D552A3FF0B100EF9472 /* 
Frameworks */ = { isa = PBXGroup; children = ( - 9C727D542A3FF09400EF9472 /* Aespa */, ); - name = Packages; + name = Frameworks; sourceTree = ""; }; - 9C727D552A3FF0B100EF9472 /* Frameworks */ = { + 9CA8C9BB2A4AE41300548463 /* Packages */ = { isa = PBXGroup; children = ( + 9CA8C9BC2A4AE41300548463 /* Aespa */, ); - name = Frameworks; + name = Packages; sourceTree = ""; }; 9CF0FE2B2A40573000FEE8C9 /* Data */ = { diff --git a/Tests/Tests/Tuner/ConnectionTunerTests.swift b/Tests/Tests/Tuner/ConnectionTunerTests.swift index 90767ff..a6872c1 100644 --- a/Tests/Tests/Tuner/ConnectionTunerTests.swift +++ b/Tests/Tests/Tuner/ConnectionTunerTests.swift @@ -28,14 +28,14 @@ final class ConnectionTunerTests: XCTestCase { let tuner = VideoOrientationTuner(orientation: orientation) stub(connection) { proxy in - when(proxy.setOrientation(to: equal(to: orientation))).then { value in + when(proxy.orientation(to: equal(to: orientation))).then { value in when(proxy.videoOrientation.get).thenReturn(orientation) } } try tuner.tune(connection) verify(connection) - .setOrientation(to: equal(to: orientation)) + .orientation(to: equal(to: orientation)) .with(returnType: Void.self) XCTAssertEqual(connection.videoOrientation, orientation) @@ -46,14 +46,14 @@ final class ConnectionTunerTests: XCTestCase { let tuner = VideoStabilizationTuner(stabilzationMode: mode) stub(connection) { proxy in - when(proxy.setStabilizationMode(to: equal(to: mode))).then { value in + when(proxy.stabilizationMode(to: equal(to: mode))).then { value in when(proxy.preferredVideoStabilizationMode.get).thenReturn(mode) } } tuner.tune(connection) verify(connection) - .setStabilizationMode(to: equal(to: mode)) + .stabilizationMode(to: equal(to: mode)) .with(returnType: Void.self) XCTAssertEqual(connection.preferredVideoStabilizationMode, mode) diff --git a/Tests/Tests/Tuner/DeviceTunerTests.swift b/Tests/Tests/Tuner/DeviceTunerTests.swift index d5f232c..4fd5a8f 100644 --- a/Tests/Tests/Tuner/DeviceTunerTests.swift +++ 
b/Tests/Tests/Tuner/DeviceTunerTests.swift @@ -22,21 +22,23 @@ final class DeviceTunerTests: XCTestCase { override func tearDownWithError() throws { device = nil } - - func testAutoFocusTuner() throws { + + func testFocusTuner() throws { let mode = AVCaptureDevice.FocusMode.locked - let tuner = AutoFocusTuner(mode: mode) + let point = CGPoint() + let tuner = FocusTuner(mode: mode, point: point) stub(device) { proxy in when(proxy.isFocusModeSupported(equal(to: mode))).thenReturn(true) - when(proxy.setFocusMode(equal(to: mode))).then { mode in - when(proxy.focusMode.get).thenReturn(mode) + when(proxy.setFocusMode(equal(to: mode), + point: equal(to: point))).then { mode in + when(proxy.focusMode.get).thenReturn(.locked) } } try tuner.tune(device) verify(device) - .setFocusMode(equal(to: mode)) + .setFocusMode(equal(to: mode), point: equal(to: point)) .with(returnType: Void.self) XCTAssertEqual(device.focusMode, mode) @@ -47,14 +49,14 @@ final class DeviceTunerTests: XCTestCase { let tuner = ZoomTuner(zoomFactor: factor) stub(device) { proxy in - when(proxy.setZoomFactor(equal(to: factor))).then { factor in + when(proxy.zoomFactor(equal(to: factor))).then { factor in when(proxy.videoZoomFactor.get).thenReturn(factor) } } tuner.tune(device) verify(device) - .setZoomFactor(equal(to: factor)) + .zoomFactor(equal(to: factor)) .with(returnType: Void.self) XCTAssertEqual(device.videoZoomFactor, factor) @@ -67,13 +69,13 @@ final class DeviceTunerTests: XCTestCase { stub(device) { proxy in when(proxy.hasTorch.get).thenReturn(true) - when(proxy.setTorchMode(equal(to: mode))).thenDoNothing() + when(proxy.torchMode(equal(to: mode))).thenDoNothing() when(proxy.setTorchModeOn(level: level)).thenDoNothing() } try tuner.tune(device) verify(device) - .setTorchMode(equal(to: mode)) + .torchMode(equal(to: mode)) .with(returnType: Void.self) verify(device) diff --git a/Tests/Tests/Tuner/SessionTunerTests.swift b/Tests/Tests/Tuner/SessionTunerTests.swift index 6e1b708..1b07311 100644 --- 
a/Tests/Tests/Tuner/SessionTunerTests.swift +++ b/Tests/Tests/Tuner/SessionTunerTests.swift @@ -29,12 +29,12 @@ final class SessionTunerTests: XCTestCase { let tuner = QualityTuner(videoQuality: preset) stub(mockSessionProtocol) { proxy in - when(proxy.setVideoQuality(to: any())).thenDoNothing() + when(proxy.videoQuality(to: any())).thenDoNothing() } try tuner.tune(mockSessionProtocol) verify(mockSessionProtocol) - .setVideoQuality(to: equal(to: AVCaptureSession.Preset.cif352x288)) + .videoQuality(to: equal(to: AVCaptureSession.Preset.cif352x288)) .with(returnType: Void.self) } @@ -43,12 +43,12 @@ final class SessionTunerTests: XCTestCase { let tuner = CameraPositionTuner(position: position) stub(mockSessionProtocol) { proxy in - when(proxy.setCameraPosition(to: any(), device: any())).thenDoNothing() + when(proxy.cameraPosition(to: any(), device: any())).thenDoNothing() } try tuner.tune(mockSessionProtocol) verify(mockSessionProtocol) - .setCameraPosition(to: equal(to: AVCaptureDevice.Position.front), device: any()) + .cameraPosition(to: equal(to: AVCaptureDevice.Position.front), device: any()) .with(returnType: Void.self) } @@ -68,62 +68,6 @@ final class SessionTunerTests: XCTestCase { verify(mockSessionProtocol).removeAudioInput() } - func testSessionLaunchTuner_whenNotRunning() throws { - stub(mockSessionProtocol) { proxy in - when(proxy.isRunning.get).thenReturn(false) - - when(proxy.addMovieInput()).thenDoNothing() - when(proxy.addMovieFileOutput()).thenDoNothing() - when(proxy.addCapturePhotoOutput()).thenDoNothing() - - when(proxy.startRunning()).thenDoNothing() - } - - let tuner = SessionLaunchTuner() - try tuner.tune(mockSessionProtocol) - - verify(mockSessionProtocol) - .addMovieInput() - .with(returnType: Void.self) - - verify(mockSessionProtocol) - .addMovieFileOutput() - .with(returnType: Void.self) - - verify(mockSessionProtocol) - .addCapturePhotoOutput() - .with(returnType: Void.self) - - verify(mockSessionProtocol) - .startRunning() - 
.with(returnType: Void.self) - } - - func testSessionLaunchTuner_whenRunning() throws { - stub(mockSessionProtocol) { proxy in - when(proxy.isRunning.get).thenReturn(true) - } - - let tuner = SessionLaunchTuner() - try tuner.tune(mockSessionProtocol) - - verify(mockSessionProtocol, never()) - .addMovieInput() - .with(returnType: Void.self) - - verify(mockSessionProtocol, never()) - .addMovieFileOutput() - .with(returnType: Void.self) - - verify(mockSessionProtocol, never()) - .addCapturePhotoOutput() - .with(returnType: Void.self) - - verify(mockSessionProtocol, never()) - .startRunning() - .with(returnType: Void.self) - } - func testSessionTerminationTuner_whenRunning() throws { stub(mockSessionProtocol) { proxy in when(proxy.isRunning.get).thenReturn(true)