From 29fdfe2f9ca7669710d7c309b77b2b9ae015f11c Mon Sep 17 00:00:00 2001 From: Michael Schwartz Date: Sat, 16 Nov 2024 17:43:14 -0500 Subject: [PATCH 01/13] skip permission if not needed --- ios/ExpoSpeechRecognitionModule.swift | 3 ++- ios/ExpoSpeechRecognizer.swift | 9 ++++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/ios/ExpoSpeechRecognitionModule.swift b/ios/ExpoSpeechRecognitionModule.swift index 362e568..d7f5812 100644 --- a/ios/ExpoSpeechRecognitionModule.swift +++ b/ios/ExpoSpeechRecognitionModule.swift @@ -160,7 +160,8 @@ public class ExpoSpeechRecognitionModule: Module { } self.speechRecognizer = try await ExpoSpeechRecognizer( - locale: locale + locale: locale, + requiresOnDeviceRecognition: options.requiresOnDeviceRecognition ) } diff --git a/ios/ExpoSpeechRecognizer.swift b/ios/ExpoSpeechRecognizer.swift index 3a39cc0..18f20a8 100644 --- a/ios/ExpoSpeechRecognizer.swift +++ b/ios/ExpoSpeechRecognizer.swift @@ -46,7 +46,8 @@ actor ExpoSpeechRecognizer: ObservableObject { /// Initializes a new speech recognizer. If this is the first time you've used the class, it /// requests access to the speech recognizer and the microphone. init( - locale: Locale + locale: Locale, + requiresOnDeviceRecognition: Bool ) async throws { recognizer = SFSpeechRecognizer( @@ -57,8 +58,10 @@ actor ExpoSpeechRecognizer: ObservableObject { throw RecognizerError.nilRecognizer } - guard await SFSpeechRecognizer.hasAuthorizationToRecognize() else { - throw RecognizerError.notAuthorizedToRecognize + if !requiresOnDeviceRecognition { + guard await SFSpeechRecognizer.hasAuthorizationToRecognize() else { + throw RecognizerError.notAuthorizedToRecognize + } } guard await AVAudioSession.sharedInstance().hasPermissionToRecord() else { From 73157474497883e5e16ffc4eabead77d5445093e Mon Sep 17 00:00:00 2001 From: Michael Schwartz Date: Sat, 16 Nov 2024 20:33:38 -0500 Subject: [PATCH 02/13] separate permissions --- ios/AudioRecordingRequester.swift | 33 +++++++++++++++++++ ios/ExpoSpeechRecognitionModule.swift | 17 ++++++++-- ....swift => SpeechRecognizerRequester.swift} | 18 +++------- src/ExpoSpeechRecognitionModule.ts | 14 ++++++-- src/ExpoSpeechRecognitionModule.types.ts | 10 +++++- 5 files changed, 73 insertions(+), 19 deletions(-) create mode 100644 ios/AudioRecordingRequester.swift rename ios/{EXSpeechRecognitionPermissionRequester.swift => SpeechRecognizerRequester.swift} (52%) diff --git a/ios/AudioRecordingRequester.swift b/ios/AudioRecordingRequester.swift new file mode 100644 index 0000000..4915ab9 --- /dev/null +++ b/ios/AudioRecordingRequester.swift @@ -0,0 +1,33 @@ +import ExpoModulesCore + +public class AudioRecordingRequester: NSObject, EXPermissionsRequester { + static public func permissionType() -> String { + return "audioRecording" + } + + public func requestPermissions( + resolver resolve: @escaping EXPromiseResolveBlock, rejecter reject: EXPromiseRejectBlock + ) { + AVAudioSession.sharedInstance().requestRecordPermission { authorized in + resolve(self.getPermissions()) + } + } + + public func getPermissions() -> [AnyHashable: Any] { + var status: EXPermissionStatus + + let recordPermission = AVAudioSession.sharedInstance().recordPermission + + if recordPermission == .granted { + status = EXPermissionStatusGranted + } else if recordPermission == .denied { + status = EXPermissionStatusDenied + } else { + status = EXPermissionStatusUndetermined + } + + return [ + "status": status.rawValue + ] + } +} diff --git a/ios/ExpoSpeechRecognitionModule.swift 
b/ios/ExpoSpeechRecognitionModule.swift index d7f5812..a3e4c41 100644 --- a/ios/ExpoSpeechRecognitionModule.swift +++ b/ios/ExpoSpeechRecognitionModule.swift @@ -104,15 +104,26 @@ public class ExpoSpeechRecognitionModule: Module { guard let permissionsManager = appContext?.permissions else { return } - permissionsManager.register([EXSpeechRecognitionPermissionRequester()]) + permissionsManager.register([AudioRecordingRequester(), SpeechRecognizerRequester()]) } - AsyncFunction("requestPermissionsAsync") { (promise: Promise) in + AsyncFunction("requestAudioRecordingPermissionsAsync") { (promise: Promise) in guard let permissions = appContext?.permissions else { throw Exceptions.PermissionsModuleNotFound() } permissions.askForPermission( - usingRequesterClass: EXSpeechRecognitionPermissionRequester.self, + usingRequesterClass: AudioRecordingRequester.self, + resolve: promise.resolver, + reject: promise.legacyRejecter + ) + } + + AsyncFunction("requestSpeechRecognizerPermissionsAsync") { (promise: Promise) in + guard let permissions = appContext?.permissions else { + throw Exceptions.PermissionsModuleNotFound() + } + permissions.askForPermission( + usingRequesterClass: SpeechRecognizerRequester.self, resolve: promise.resolver, reject: promise.legacyRejecter ) diff --git a/ios/EXSpeechRecognitionPermissionRequester.swift b/ios/SpeechRecognizerRequester.swift similarity index 52% rename from ios/EXSpeechRecognitionPermissionRequester.swift rename to ios/SpeechRecognizerRequester.swift index d8992c7..e18c802 100644 --- a/ios/EXSpeechRecognitionPermissionRequester.swift +++ b/ios/SpeechRecognizerRequester.swift @@ -1,35 +1,27 @@ -import AppTrackingTransparency import ExpoModulesCore import Speech -public class EXSpeechRecognitionPermissionRequester: NSObject, EXPermissionsRequester { +public class SpeechRecognizerRequester: NSObject, EXPermissionsRequester { static public func permissionType() -> String { - return "speechrecognition" + return "speechRecognizer" } public func requestPermissions( resolver resolve: @escaping EXPromiseResolveBlock, rejecter reject: EXPromiseRejectBlock ) { SFSpeechRecognizer.requestAuthorization { status in - if status != .authorized { - resolve(self.getPermissions()) - return - } - AVAudioSession.sharedInstance().requestRecordPermission { authorized in - resolve(self.getPermissions()) - } + resolve(self.getPermissions()) } } public func getPermissions() -> [AnyHashable: Any] { var status: EXPermissionStatus - let recordPermission = AVAudioSession.sharedInstance().recordPermission let speechPermission = SFSpeechRecognizer.authorizationStatus() - if speechPermission == .authorized && recordPermission == .granted { + if speechPermission == .authorized { status = EXPermissionStatusGranted - } else if speechPermission == .denied || recordPermission == .denied { + } else if speechPermission == .denied { status = EXPermissionStatusDenied } else { status = EXPermissionStatusUndetermined diff --git a/src/ExpoSpeechRecognitionModule.ts b/src/ExpoSpeechRecognitionModule.ts index baf408d..63f1613 100644 --- a/src/ExpoSpeechRecognitionModule.ts +++ b/src/ExpoSpeechRecognitionModule.ts @@ -16,8 +16,18 @@ export const ExpoSpeechRecognitionModule: ExpoSpeechRecognitionModuleType = { // Avoid any function bindings when calling the native module stop: () => ExpoSpeechRecognitionNativeModule.stop(), abort: () => ExpoSpeechRecognitionNativeModule.abort(), - requestPermissionsAsync: () => - ExpoSpeechRecognitionNativeModule.requestPermissionsAsync(), + requestPermissionsAsync: async () 
=> { + const microphonePermissions = + await ExpoSpeechRecognitionNativeModule.requestAudioRecordingPermissionsAsync(); + if (microphonePermissions.status === "granted") { + return await ExpoSpeechRecognitionNativeModule.requestSpeechRecognizerPermissionsAsync(); + } + return microphonePermissions; + }, + requestAudioRecordingPermissionsAsync: () => + ExpoSpeechRecognitionNativeModule.requestAudioRecordingPermissionsAsync(), + requestSpeechRecognizerPermissionsAsync: () => + ExpoSpeechRecognitionNativeModule.requestSpeechRecognizerPermissionsAsync(), getPermissionsAsync: () => ExpoSpeechRecognitionNativeModule.getPermissionsAsync(), getStateAsync: () => ExpoSpeechRecognitionNativeModule.getStateAsync(), diff --git a/src/ExpoSpeechRecognitionModule.types.ts b/src/ExpoSpeechRecognitionModule.types.ts index 0ec66f1..5364638 100644 --- a/src/ExpoSpeechRecognitionModule.types.ts +++ b/src/ExpoSpeechRecognitionModule.types.ts @@ -557,10 +557,18 @@ export interface ExpoSpeechRecognitionModuleType extends NativeModule { /** * Presents a dialog to the user to request permissions for using speech recognition and the microphone. * - * For iOS, once a user has granted (or denied) location permissions by responding to the original permission request dialog, + * For iOS, once a user has granted (or denied) permissions by responding to the original permission request dialog, * the only way that the permissions can be changed is by the user themselves using the device settings app. */ requestPermissionsAsync(): Promise; + /** + * Presents a dialog to the user to request permissions for using the microphone. + */ + requestAudioRecordingPermissionsAsync(): Promise; + /** + * Presents a dialog to the user to request permissions for using speech recognition. + */ + requestSpeechRecognizerPermissionsAsync(): Promise; /** * Returns the current permission status for the microphone and speech recognition. 
*/ From 17cb32384ec64c2eb0755dcb617e8b06f9a09626 Mon Sep 17 00:00:00 2001 From: Michael Schwartz Date: Sat, 16 Nov 2024 21:22:00 -0500 Subject: [PATCH 03/13] dev --- ...SpeechRecognitionPermissionRequester.swift | 42 +++++++++++++++ ios/ExpoSpeechRecognitionModule.swift | 51 ++++++++++++++----- ...uester.swift => MicrophoneRequester.swift} | 4 +- src/ExpoSpeechRecognitionModule.ts | 22 ++++---- src/ExpoSpeechRecognitionModule.types.ts | 14 +++-- src/ExpoSpeechRecognitionModule.web.ts | 12 +++++ 6 files changed, 110 insertions(+), 35 deletions(-) create mode 100644 ios/EXSpeechRecognitionPermissionRequester.swift rename ios/{AudioRecordingRequester.swift => MicrophoneRequester.swift} (88%) diff --git a/ios/EXSpeechRecognitionPermissionRequester.swift b/ios/EXSpeechRecognitionPermissionRequester.swift new file mode 100644 index 0000000..d8a8a55 --- /dev/null +++ b/ios/EXSpeechRecognitionPermissionRequester.swift @@ -0,0 +1,42 @@ +import AppTrackingTransparency +import ExpoModulesCore +import Speech + +public class EXSpeechRecognitionPermissionRequester: NSObject, EXPermissionsRequester { + static public func permissionType() -> String { + return "speechrecognition" + } + + public func requestPermissions( + resolver resolve: @escaping EXPromiseResolveBlock, rejecter reject: EXPromiseRejectBlock + ) { + SFSpeechRecognizer.requestAuthorization { status in + if status != .authorized { + resolve(self.getPermissions()) + return + } + AVAudioSession.sharedInstance().requestRecordPermission { authorized in + resolve(self.getPermissions()) + } + } + } + + public func getPermissions() -> [AnyHashable: Any] { + var status: EXPermissionStatus + + let recordPermission = AVAudioSession.sharedInstance().recordPermission + let speechPermission = SFSpeechRecognizer.authorizationStatus() + + if speechPermission == .authorized && recordPermission == .granted { + status = EXPermissionStatusGranted + } else if speechPermission == .denied || recordPermission == .denied { + status = EXPermissionStatusDenied + } else { + status = EXPermissionStatusUndetermined + } + + return [ + "status": status.rawValue + ] + } +} \ No newline at end of file diff --git a/ios/ExpoSpeechRecognitionModule.swift b/ios/ExpoSpeechRecognitionModule.swift index a3e4c41..0819d53 100644 --- a/ios/ExpoSpeechRecognitionModule.swift +++ b/ios/ExpoSpeechRecognitionModule.swift @@ -104,37 +104,62 @@ public class ExpoSpeechRecognitionModule: Module { guard let permissionsManager = appContext?.permissions else { return } - permissionsManager.register([AudioRecordingRequester(), SpeechRecognizerRequester()]) + permissionsManager.register([ + EXSpeechRecognitionPermissionRequester(), + MicrophoneRequester(), + SpeechRecognizerRequester() + ]) } - AsyncFunction("requestAudioRecordingPermissionsAsync") { (promise: Promise) in + AsyncFunction("requestPermissionsAsync") { (promise: Promise) in guard let permissions = appContext?.permissions else { throw Exceptions.PermissionsModuleNotFound() } permissions.askForPermission( - usingRequesterClass: AudioRecordingRequester.self, + usingRequesterClass: EXSpeechRecognitionPermissionRequester.self, resolve: promise.resolver, reject: promise.legacyRejecter ) } - AsyncFunction("requestSpeechRecognizerPermissionsAsync") { (promise: Promise) in - guard let permissions = appContext?.permissions else { + AsyncFunction("getPermissionsAsync") { (promise: Promise) in + guard let permissions = self.appContext?.permissions else { throw Exceptions.PermissionsModuleNotFound() } - permissions.askForPermission( - 
usingRequesterClass: SpeechRecognizerRequester.self, + permissions.getPermissionUsingRequesterClass( + EXSpeechRecognitionPermissionRequester.self, resolve: promise.resolver, reject: promise.legacyRejecter ) } - AsyncFunction("getPermissionsAsync") { (promise: Promise) in - guard let permissions = self.appContext?.permissions else { - throw Exceptions.PermissionsModuleNotFound() - } - permissions.getPermissionUsingRequesterClass( - EXSpeechRecognitionPermissionRequester.self, + AsyncFunction("getMicrophonePermissionsAsync") { (promise: Promise) in + appContext?.permissions?.getPermissionUsingRequesterClass( + MicrophoneRequester.self, + resolve: promise.resolver, + reject: promise.legacyRejecter + ) + } + + AsyncFunction("requestMicrophonePermissionsAsync") { (promise: Promise) in + appContext?.permissions?.askForPermission( + usingRequesterClass: MicrophoneRequester.self, + resolve: promise.resolver, + reject: promise.legacyRejecter + ) + } + + AsyncFunction("getSpeechRecognizerPermissionsAsync") { (promise: Promise) in + appContext?.permissions?.getPermissionUsingRequesterClass( + SpeechRecognizerRequester.self, + resolve: promise.resolver, + reject: promise.legacyRejecter + ) + } + + AsyncFunction("requestSpeechRecognizerPermissionsAsync") { (promise: Promise) in + appContext?.permissions?.askForPermission( + usingRequesterClass: SpeechRecognizerRequester.self, resolve: promise.resolver, reject: promise.legacyRejecter ) diff --git a/ios/AudioRecordingRequester.swift b/ios/MicrophoneRequester.swift similarity index 88% rename from ios/AudioRecordingRequester.swift rename to ios/MicrophoneRequester.swift index 4915ab9..5f22a87 100644 --- a/ios/AudioRecordingRequester.swift +++ b/ios/MicrophoneRequester.swift @@ -1,8 +1,8 @@ import ExpoModulesCore -public class AudioRecordingRequester: NSObject, EXPermissionsRequester { +public class MicrophoneRequester: NSObject, EXPermissionsRequester { static public func permissionType() -> String { - return "audioRecording" + return "microphone" } public func requestPermissions( diff --git a/src/ExpoSpeechRecognitionModule.ts b/src/ExpoSpeechRecognitionModule.ts index 63f1613..d5187b1 100644 --- a/src/ExpoSpeechRecognitionModule.ts +++ b/src/ExpoSpeechRecognitionModule.ts @@ -16,20 +16,18 @@ export const ExpoSpeechRecognitionModule: ExpoSpeechRecognitionModuleType = { // Avoid any function bindings when calling the native module stop: () => ExpoSpeechRecognitionNativeModule.stop(), abort: () => ExpoSpeechRecognitionNativeModule.abort(), - requestPermissionsAsync: async () => { - const microphonePermissions = - await ExpoSpeechRecognitionNativeModule.requestAudioRecordingPermissionsAsync(); - if (microphonePermissions.status === "granted") { - return await ExpoSpeechRecognitionNativeModule.requestSpeechRecognizerPermissionsAsync(); - } - return microphonePermissions; - }, - requestAudioRecordingPermissionsAsync: () => - ExpoSpeechRecognitionNativeModule.requestAudioRecordingPermissionsAsync(), - requestSpeechRecognizerPermissionsAsync: () => - ExpoSpeechRecognitionNativeModule.requestSpeechRecognizerPermissionsAsync(), + requestPermissionsAsync: () => + ExpoSpeechRecognitionNativeModule.requestPermissionsAsync(), getPermissionsAsync: () => ExpoSpeechRecognitionNativeModule.getPermissionsAsync(), + getMicrophonePermissionsAsync: () => + ExpoSpeechRecognitionNativeModule.getMicrophonePermissionsAsync(), + requestMicrophonePermissionsAsync: () => + ExpoSpeechRecognitionNativeModule.requestMicrophonePermissionsAsync(), + getSpeechRecognizerPermissionsAsync: 
() => + ExpoSpeechRecognitionNativeModule.getSpeechRecognizerPermissionsAsync(), + requestSpeechRecognizerPermissionsAsync: () => + ExpoSpeechRecognitionNativeModule.requestSpeechRecognizerPermissionsAsync(), getStateAsync: () => ExpoSpeechRecognitionNativeModule.getStateAsync(), getAssistantService: () => ExpoSpeechRecognitionNativeModule.getAssistantService(), diff --git a/src/ExpoSpeechRecognitionModule.types.ts b/src/ExpoSpeechRecognitionModule.types.ts index 5364638..5ed33c1 100644 --- a/src/ExpoSpeechRecognitionModule.types.ts +++ b/src/ExpoSpeechRecognitionModule.types.ts @@ -561,18 +561,16 @@ export interface ExpoSpeechRecognitionModuleType extends NativeModule { * the only way that the permissions can be changed is by the user themselves using the device settings app. */ requestPermissionsAsync(): Promise; - /** - * Presents a dialog to the user to request permissions for using the microphone. - */ - requestAudioRecordingPermissionsAsync(): Promise; - /** - * Presents a dialog to the user to request permissions for using speech recognition. - */ - requestSpeechRecognizerPermissionsAsync(): Promise; /** * Returns the current permission status for the microphone and speech recognition. */ getPermissionsAsync(): Promise; + + getMicrophonePermissionsAsync(): Promise; + requestMicrophonePermissionsAsync(): Promise; + getSpeechRecognizerPermissionsAsync(): Promise; + requestSpeechRecognizerPermissionsAsync(): Promise; + /** * Returns an array of locales supported by the speech recognizer. * diff --git a/src/ExpoSpeechRecognitionModule.web.ts b/src/ExpoSpeechRecognitionModule.web.ts index 0679570..cae8a03 100644 --- a/src/ExpoSpeechRecognitionModule.web.ts +++ b/src/ExpoSpeechRecognitionModule.web.ts @@ -62,6 +62,18 @@ export const ExpoSpeechRecognitionModule: ExpoSpeechRecognitionModuleType = { status: "denied", } as PermissionResponse); }, + getMicrophonePermissionsAsync: () => { + return Promise.reject(Error("Not implemented")); + }, + requestMicrophonePermissionsAsync: () => { + return Promise.reject(Error("Not implemented")); + }, + getSpeechRecognizerPermissionsAsync: () => { + return Promise.reject(Error("Not implemented")); + }, + requestSpeechRecognizerPermissionsAsync: () => { + return Promise.reject(Error("Not implemented")); + }, getSupportedLocales: async () => { console.warn( "getSupportedLocales is not supported on web. 
Returning an empty array.", From 5e655c2dbb9e56b446803f16d1a953c2cbaac1f6 Mon Sep 17 00:00:00 2001 From: Michael Schwartz Date: Sat, 16 Nov 2024 21:26:38 -0500 Subject: [PATCH 04/13] Update App.tsx --- example/App.tsx | 44 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/example/App.tsx b/example/App.tsx index f5c0fb6..cdb1072 100644 --- a/example/App.tsx +++ b/example/App.tsx @@ -811,6 +811,50 @@ function OtherSettings(props: { ); }} /> + { + ExpoSpeechRecognitionModule.getMicrophonePermissionsAsync().then( + (result) => { + Alert.alert("Result", JSON.stringify(result)); + }, + ); + }} + /> + { + ExpoSpeechRecognitionModule.requestMicrophonePermissionsAsync().then( + (result) => { + Alert.alert("Result", JSON.stringify(result)); + }, + ); + }} + /> + { + ExpoSpeechRecognitionModule.getSpeechRecognizerPermissionsAsync().then( + (result) => { + Alert.alert("Result", JSON.stringify(result)); + }, + ); + }} + /> + { + ExpoSpeechRecognitionModule.requestSpeechRecognizerPermissionsAsync().then( + (result) => { + Alert.alert("Result", JSON.stringify(result)); + }, + ); + }} + /> Date: Sat, 16 Nov 2024 21:35:33 -0500 Subject: [PATCH 05/13] Update Podfile.lock --- example/ios/Podfile.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/example/ios/Podfile.lock b/example/ios/Podfile.lock index aaf1870..6795404 100644 --- a/example/ios/Podfile.lock +++ b/example/ios/Podfile.lock @@ -39,7 +39,7 @@ PODS: - ReactCommon/turbomodule/bridging - ReactCommon/turbomodule/core - Yoga - - ExpoSpeechRecognition (0.2.23): + - ExpoSpeechRecognition (0.2.25): - ExpoModulesCore - EXSplashScreen (0.27.6): - DoubleConversion @@ -1467,7 +1467,7 @@ SPEC CHECKSUMS: ExpoFont: 00756e6c796d8f7ee8d211e29c8b619e75cbf238 ExpoKeepAwake: 3b8815d9dd1d419ee474df004021c69fdd316d08 ExpoModulesCore: 260ee156852434da26e782bbb993093186c5aade - ExpoSpeechRecognition: 078201eb1fbae87409cfecba6176edc262f75121 + ExpoSpeechRecognition: f61156226f963808aeadcde6d463133e162f138a EXSplashScreen: 10b116117c9bb6a272ba782706f21dadc23f44d9 FBLazyVector: ac12dc084d1c8ec4cc4d7b3cf1b0ebda6dab85af fmt: 4c2741a687cc09f0634a2e2c72a838b99f1ff120 @@ -1522,7 +1522,7 @@ SPEC CHECKSUMS: ReactCommon: f7da14a8827b72704169a48c929bcde802698361 RNReanimated: 58a768c2c17a5589ef732fa6bd8d7ed0eb6df1c1 SocketRocket: abac6f5de4d4d62d24e11868d7a2f427e0ef940d - Yoga: 2246eea72aaf1b816a68a35e6e4b74563653ae09 + Yoga: 950bbfd7e6f04790fdb51149ed51df41f329fcc8 PODFILE CHECKSUM: ceec9518dac8a3c13e167f58439e04a010b51688 From cb1388c705fc2594bfacdee36621073dd5463375 Mon Sep 17 00:00:00 2001 From: Michael Schwartz Date: Sat, 16 Nov 2024 21:35:37 -0500 Subject: [PATCH 06/13] Update App.tsx --- example/App.tsx | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/example/App.tsx b/example/App.tsx index cdb1072..04f30e6 100644 --- a/example/App.tsx +++ b/example/App.tsx @@ -128,7 +128,7 @@ export default function App() { console.log("[event]: languagedetection", ev); }); - const startListening = () => { + const startListening = async () => { if (status !== "idle") { return; } @@ -136,16 +136,27 @@ export default function App() { setError(null); setStatus("starting"); - ExpoSpeechRecognitionModule.requestPermissionsAsync().then((result) => { - console.log("Permissions", result); - if (!result.granted) { - console.log("Permissions not granted", result); + const microphonePermissions = + await ExpoSpeechRecognitionModule.requestMicrophonePermissionsAsync(); + 
console.log("Microphone permissions", microphonePermissions); + if (!microphonePermissions.granted) { + setError({ error: "not-allowed", message: "Permissions not granted" }); + setStatus("idle"); + return; + } + + if (!settings.requiresOnDeviceRecognition) { + const speechRecognizerPermissions = + await ExpoSpeechRecognitionModule.requestSpeechRecognizerPermissionsAsync(); + console.log("Speech recognizer permissions", speechRecognizerPermissions); + if (!speechRecognizerPermissions.granted) { setError({ error: "not-allowed", message: "Permissions not granted" }); setStatus("idle"); return; } - ExpoSpeechRecognitionModule.start(settings); - }); + } + + ExpoSpeechRecognitionModule.start(settings); }; return ( From 0e8f6e8d9a86ca44d143c966af07b1615e8dbc09 Mon Sep 17 00:00:00 2001 From: Michael Schwartz Date: Sun, 17 Nov 2024 10:28:02 -0500 Subject: [PATCH 07/13] moving permission check --- ios/ExpoSpeechRecognitionModule.swift | 21 +++++++++++++++++++-- ios/ExpoSpeechRecognizer.swift | 13 +------------ 2 files changed, 20 insertions(+), 14 deletions(-) diff --git a/ios/ExpoSpeechRecognitionModule.swift b/ios/ExpoSpeechRecognitionModule.swift index 0819d53..7d9d110 100644 --- a/ios/ExpoSpeechRecognitionModule.swift +++ b/ios/ExpoSpeechRecognitionModule.swift @@ -196,10 +196,27 @@ public class ExpoSpeechRecognitionModule: Module { } self.speechRecognizer = try await ExpoSpeechRecognizer( - locale: locale, - requiresOnDeviceRecognition: options.requiresOnDeviceRecognition + locale: locale ) } + + if !options.requiresOnDeviceRecognition { + guard await SFSpeechRecognizer.hasAuthorizationToRecognize() else { + sendErrorAndStop( + error: "not-allowed", + message: RecognizerError.notAuthorizedToRecognize.message + ) + return + } + } + + guard await AVAudioSession.sharedInstance().hasPermissionToRecord() else { + sendErrorAndStop( + error: "not-allowed", + message: RecognizerError.notPermittedToRecord.message + ) + return + } // Start recognition! await speechRecognizer?.start( diff --git a/ios/ExpoSpeechRecognizer.swift b/ios/ExpoSpeechRecognizer.swift index 18f20a8..1fefebc 100644 --- a/ios/ExpoSpeechRecognizer.swift +++ b/ios/ExpoSpeechRecognizer.swift @@ -46,8 +46,7 @@ actor ExpoSpeechRecognizer: ObservableObject { /// Initializes a new speech recognizer. If this is the first time you've used the class, it /// requests access to the speech recognizer and the microphone. init( - locale: Locale, - requiresOnDeviceRecognition: Bool + locale: Locale ) async throws { recognizer = SFSpeechRecognizer( @@ -57,16 +56,6 @@ actor ExpoSpeechRecognizer: ObservableObject { guard recognizer != nil else { throw RecognizerError.nilRecognizer } - - if !requiresOnDeviceRecognition { - guard await SFSpeechRecognizer.hasAuthorizationToRecognize() else { - throw RecognizerError.notAuthorizedToRecognize - } - } - - guard await AVAudioSession.sharedInstance().hasPermissionToRecord() else { - throw RecognizerError.notPermittedToRecord - } } /// Returns a suitable audio format to use for the speech recognition task and audio file recording. 
From 0548b9db365fb6247af064bc2b7b1f494c19a2ee Mon Sep 17 00:00:00 2001 From: Michael Schwartz Date: Sun, 17 Nov 2024 10:33:44 -0500 Subject: [PATCH 08/13] deprecated --- ...SpeechRecognitionPermissionRequester.swift | 2 +- src/ExpoSpeechRecognitionModule.types.ts | 24 +++++++++++++++++-- 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/ios/EXSpeechRecognitionPermissionRequester.swift b/ios/EXSpeechRecognitionPermissionRequester.swift index d8a8a55..d8992c7 100644 --- a/ios/EXSpeechRecognitionPermissionRequester.swift +++ b/ios/EXSpeechRecognitionPermissionRequester.swift @@ -39,4 +39,4 @@ public class EXSpeechRecognitionPermissionRequester: NSObject, EXPermissionsRequ "status": status.rawValue ] } -} \ No newline at end of file +} diff --git a/src/ExpoSpeechRecognitionModule.types.ts b/src/ExpoSpeechRecognitionModule.types.ts index 5ed33c1..2122ac5 100644 --- a/src/ExpoSpeechRecognitionModule.types.ts +++ b/src/ExpoSpeechRecognitionModule.types.ts @@ -559,18 +559,38 @@ export interface ExpoSpeechRecognitionModuleType extends NativeModule { * * For iOS, once a user has granted (or denied) permissions by responding to the original permission request dialog, * the only way that the permissions can be changed is by the user themselves using the device settings app. + * + * @deprecated Use `requestMicrophonePermissionsAsync` and `requestSpeechRecognizerPermissionsAsync` instead. */ requestPermissionsAsync(): Promise; /** * Returns the current permission status for the microphone and speech recognition. + * + * @deprecated Use `getMicrophonePermissionsAsync` and `getSpeechRecognizerPermissionsAsync` instead. */ getPermissionsAsync(): Promise; - + /** + * Returns the current permission status for the microphone. + */ getMicrophonePermissionsAsync(): Promise; + /** + * Presents a dialog to the user to request permissions for using the microphone. + * + * For iOS, once a user has granted (or denied) permissions by responding to the original permission request dialog, + * the only way that the permissions can be changed is by the user themselves using the device settings app. + */ requestMicrophonePermissionsAsync(): Promise; + /** + * Returns the current permission status for speech recognition. + */ getSpeechRecognizerPermissionsAsync(): Promise; + /** + * Presents a dialog to the user to request permissions for using speech recognition. + * + * For iOS, once a user has granted (or denied) permissions by responding to the original permission request dialog, + * the only way that the permissions can be changed is by the user themselves using the device settings app. + */ requestSpeechRecognizerPermissionsAsync(): Promise; - /** * Returns an array of locales supported by the speech recognizer. 
* From 76d933368941c3ae1759fc9457728848c5043579 Mon Sep 17 00:00:00 2001 From: Michael Schwartz Date: Mon, 18 Nov 2024 08:32:59 -0500 Subject: [PATCH 09/13] Update example/App.tsx Co-authored-by: jamsch <12927717+jamsch@users.noreply.github.com> --- example/App.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/example/App.tsx b/example/App.tsx index 04f30e6..dd1819e 100644 --- a/example/App.tsx +++ b/example/App.tsx @@ -145,7 +145,7 @@ export default function App() { return; } - if (!settings.requiresOnDeviceRecognition) { + if (!settings.requiresOnDeviceRecognition && Platform.OS === "ios") { const speechRecognizerPermissions = await ExpoSpeechRecognitionModule.requestSpeechRecognizerPermissionsAsync(); console.log("Speech recognizer permissions", speechRecognizerPermissions); From 1b6753e5549efc85a5f5d40b27a35b545a01b1ea Mon Sep 17 00:00:00 2001 From: Michael Schwartz Date: Mon, 18 Nov 2024 12:26:49 -0500 Subject: [PATCH 10/13] return granted response on web --- src/ExpoSpeechRecognitionModule.web.ts | 57 ++++++++++++++++++++++++-- 1 file changed, 54 insertions(+), 3 deletions(-) diff --git a/src/ExpoSpeechRecognitionModule.web.ts b/src/ExpoSpeechRecognitionModule.web.ts index 3e55f5d..f3186bc 100644 --- a/src/ExpoSpeechRecognitionModule.web.ts +++ b/src/ExpoSpeechRecognitionModule.web.ts @@ -152,6 +152,9 @@ class ExpoSpeechRecognitionModuleWeb extends NativeModule Date: Mon, 18 Nov 2024 12:29:54 -0500 Subject: [PATCH 11/13] Update src/ExpoSpeechRecognitionModule.types.ts Co-authored-by: jamsch <12927717+jamsch@users.noreply.github.com> --- src/ExpoSpeechRecognitionModule.types.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/ExpoSpeechRecognitionModule.types.ts b/src/ExpoSpeechRecognitionModule.types.ts index 72b4f75..c2925c4 100644 --- a/src/ExpoSpeechRecognitionModule.types.ts +++ b/src/ExpoSpeechRecognitionModule.types.ts @@ -591,7 +591,8 @@ export declare class ExpoSpeechRecognitionModuleType extends NativeModule; /** - * Presents a dialog to the user to request permissions for using speech recognition. + * [iOS only] Presents a dialog to the user to request permissions for using the speech recognizer. + * This permission is required when `requiresOnDeviceRecognition` is disabled (i.e. network-based recognition) * * For iOS, once a user has granted (or denied) permissions by responding to the original permission request dialog, * the only way that the permissions can be changed is by the user themselves using the device settings app. From 257859c3d437a971edd6e2461e4100ab399b5670 Mon Sep 17 00:00:00 2001 From: Michael Schwartz Date: Mon, 18 Nov 2024 12:59:00 -0500 Subject: [PATCH 12/13] More docs update and removing deprecated --- src/ExpoSpeechRecognitionModule.types.ts | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/ExpoSpeechRecognitionModule.types.ts b/src/ExpoSpeechRecognitionModule.types.ts index c2925c4..c5342e0 100644 --- a/src/ExpoSpeechRecognitionModule.types.ts +++ b/src/ExpoSpeechRecognitionModule.types.ts @@ -563,16 +563,17 @@ export declare class ExpoSpeechRecognitionModuleType extends NativeModule; /** - * Returns the current permission status for the microphone and speech recognition. + * Returns the current permission status for speech recognition and the microphone. * - * @deprecated Use `getMicrophonePermissionsAsync` and `getSpeechRecognizerPermissionsAsync` instead. 
+ * You may also use `getMicrophonePermissionsAsync` and `getSpeechRecognizerPermissionsAsync` to get the permissions separately. */ getPermissionsAsync(): Promise; /** From eb14871ec893d3504a22b03087a044351fd6723f Mon Sep 17 00:00:00 2001 From: Michael Schwartz Date: Mon, 18 Nov 2024 13:59:54 -0500 Subject: [PATCH 13/13] Adding Android permission functions --- .../ExpoSpeechRecognitionModule.kt | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/android/src/main/java/expo/modules/speechrecognition/ExpoSpeechRecognitionModule.kt b/android/src/main/java/expo/modules/speechrecognition/ExpoSpeechRecognitionModule.kt index 570c8a3..06b6fd0 100644 --- a/android/src/main/java/expo/modules/speechrecognition/ExpoSpeechRecognitionModule.kt +++ b/android/src/main/java/expo/modules/speechrecognition/ExpoSpeechRecognitionModule.kt @@ -6,6 +6,7 @@ import android.content.ComponentName import android.content.Context import android.content.Intent import android.os.Build +import android.os.Bundle import android.os.Handler import android.provider.Settings import android.speech.ModelDownloadListener @@ -16,6 +17,7 @@ import android.speech.RecognizerIntent import android.speech.SpeechRecognizer import android.util.Log import androidx.annotation.RequiresApi +import expo.modules.interfaces.permissions.PermissionsResponse import expo.modules.interfaces.permissions.Permissions.askForPermissionsWithPermissionsManager import expo.modules.interfaces.permissions.Permissions.getPermissionsWithPermissionsManager import expo.modules.kotlin.Promise @@ -143,6 +145,46 @@ class ExpoSpeechRecognitionModule : Module() { ) } + AsyncFunction("requestMicrophonePermissionsAsync") { promise: Promise -> + askForPermissionsWithPermissionsManager( + appContext.permissions, + promise, + RECORD_AUDIO, + ) + } + + AsyncFunction("getMicrophonePermissionsAsync") { promise: Promise -> + getPermissionsWithPermissionsManager( + appContext.permissions, + promise, + RECORD_AUDIO, + ) + } + + AsyncFunction("getSpeechRecognizerPermissionsAsync") { promise: Promise -> + Log.w("ExpoSpeechRecognitionModule", "getSpeechRecognizerPermissionsAsync is not supported on Android. Returning a granted permission response.") + promise.resolve( + Bundle().apply { + putString(PermissionsResponse.EXPIRES_KEY, "never") + putString(PermissionsResponse.STATUS_KEY, "granted") + putBoolean(PermissionsResponse.CAN_ASK_AGAIN_KEY, false) + putBoolean(PermissionsResponse.GRANTED_KEY, true) + } + ) + } + + AsyncFunction("requestSpeechRecognizerPermissionsAsync") { promise: Promise -> + Log.w("ExpoSpeechRecognitionModule", "requestSpeechRecognizerPermissionsAsync is not supported on Android. Returning a granted permission response.") + promise.resolve( + Bundle().apply { + putString(PermissionsResponse.EXPIRES_KEY, "never") + putString(PermissionsResponse.STATUS_KEY, "granted") + putBoolean(PermissionsResponse.CAN_ASK_AGAIN_KEY, false) + putBoolean(PermissionsResponse.GRANTED_KEY, true) + } + ) + } + AsyncFunction("getStateAsync") { promise: Promise -> val state = when (expoSpeechService.recognitionState) {
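
Patch 13 rounds out the API on Android: the speech recognizer permission functions log a warning and resolve with a granted response, so the granular functions can be called on every platform. Below is a rough sketch of checking the current status before prompting; the ensurePermissions helper name and the import path are assumptions, while the response fields (granted, canAskAgain) follow the PermissionResponse shape used throughout the series:

// Assumed package entry point; not shown in these patches.
import { ExpoSpeechRecognitionModule } from "expo-speech-recognition";

// Check both permissions without prompting, then only ask for whichever is
// still missing. Android resolves the speech recognizer permission as granted
// (patch 13), and per patch 10's title web behaves the same, so no platform
// branching is required here.
async function ensurePermissions(): Promise<boolean> {
  const [microphone, recognizer] = await Promise.all([
    ExpoSpeechRecognitionModule.getMicrophonePermissionsAsync(),
    ExpoSpeechRecognitionModule.getSpeechRecognizerPermissionsAsync(),
  ]);

  let microphoneGranted = microphone.granted;
  if (!microphoneGranted && microphone.canAskAgain) {
    const response =
      await ExpoSpeechRecognitionModule.requestMicrophonePermissionsAsync();
    microphoneGranted = response.granted;
  }

  let recognizerGranted = recognizer.granted;
  if (!recognizerGranted && recognizer.canAskAgain) {
    const response =
      await ExpoSpeechRecognitionModule.requestSpeechRecognizerPermissionsAsync();
    recognizerGranted = response.granted;
  }

  return microphoneGranted && recognizerGranted;
}
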