From 0af112b37542984e12cdbfc60c4b1f64ef26a07c Mon Sep 17 00:00:00 2001 From: Robert Odrowaz Date: Tue, 29 Jul 2025 09:51:28 +0200 Subject: [PATCH 1/3] Migrate startImageStream method to Swift --- .../camera/camera_avfoundation/CHANGELOG.md | 4 ++ .../camera_avfoundation/DefaultCamera.swift | 47 +++++++++++++++++++ .../Sources/camera_avfoundation_objc/FLTCam.m | 47 ------------------- .../include/camera_avfoundation/FLTCam.h | 2 - .../include/camera_avfoundation/FLTCam_Test.h | 5 -- .../camera/camera_avfoundation/pubspec.yaml | 2 +- 6 files changed, 52 insertions(+), 55 deletions(-) diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md index b7c45549d10..c4d10e4190a 100644 --- a/packages/camera/camera_avfoundation/CHANGELOG.md +++ b/packages/camera/camera_avfoundation/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.9.21+1 + +* Migrates `startImageStream` method to Swift. + ## 0.9.21 * Fixes crash when streaming is enabled during recording. diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift index 7672d343c6d..7a2a3f00093 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift @@ -728,6 +728,53 @@ final class DefaultCamera: FLTCam, Camera { completion(nil) } + func startImageStream( + with messenger: any FlutterBinaryMessenger, completion: @escaping (FlutterError?) 
-> Void + ) { + startImageStream( + with: messenger, + imageStreamHandler: FLTImageStreamHandler(captureSessionQueue: captureSessionQueue), + completion: completion + ) + } + + func startImageStream( + with messenger: FlutterBinaryMessenger, + imageStreamHandler: FLTImageStreamHandler, + completion: @escaping (FlutterError?) -> Void + ) { + if isStreamingImages { + reportErrorMessage("Images from camera are already streaming!") + completion(nil) + return + } + + let eventChannel = FlutterEventChannel( + name: "plugins.flutter.io/camera_avfoundation/imageStream", + binaryMessenger: messenger + ) + let threadSafeEventChannel = FLTThreadSafeEventChannel(eventChannel: eventChannel) + + self.imageStreamHandler = imageStreamHandler + threadSafeEventChannel.setStreamHandler(imageStreamHandler) { [weak self] in + guard let strongSelf = self else { + completion(nil) + return + } + + strongSelf.captureSessionQueue.async { [weak self] in + guard let strongSelf = self else { + completion(nil) + return + } + + strongSelf.isStreamingImages = true + strongSelf.streamingPendingFramesCount = 0 + completion(nil) + } + } + } + func stopImageStream() { if isStreamingImages { isStreamingImages = false diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m index 4b3f1bb82b1..099aaea4d0a 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m @@ -308,53 +308,6 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset return bestFormat; } -- (void)startImageStreamWithMessenger:(NSObject *)messenger - completion:(void (^)(FlutterError *))completion { - [self startImageStreamWithMessenger:messenger - imageStreamHandler:[[FLTImageStreamHandler 
alloc] - initWithCaptureSessionQueue:_captureSessionQueue] - completion:completion]; -} - -- (void)startImageStreamWithMessenger:(NSObject *)messenger - imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler - completion:(void (^)(FlutterError *))completion { - if (!_isStreamingImages) { - id eventChannel = [FlutterEventChannel - eventChannelWithName:@"plugins.flutter.io/camera_avfoundation/imageStream" - binaryMessenger:messenger]; - FLTThreadSafeEventChannel *threadSafeEventChannel = - [[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannel]; - - _imageStreamHandler = imageStreamHandler; - __weak typeof(self) weakSelf = self; - [threadSafeEventChannel setStreamHandler:_imageStreamHandler - completion:^{ - typeof(self) strongSelf = weakSelf; - if (!strongSelf) { - completion(nil); - return; - } - - dispatch_async(strongSelf.captureSessionQueue, ^{ - // cannot use the outter strongSelf - typeof(self) strongSelf = weakSelf; - if (!strongSelf) { - completion(nil); - return; - } - - strongSelf.isStreamingImages = YES; - strongSelf.streamingPendingFramesCount = 0; - completion(nil); - }); - }]; - } else { - [self reportErrorMessage:@"Images from camera are already streaming!"]; - completion(nil); - } -} - // This function, although slightly modified, is also in video_player_avfoundation. // Both need to do the same thing and run on the same thread (for example main thread). 
// Configure application wide audio session manually to prevent overwriting flag diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h index 9ecede50b5e..2a0a19b2f81 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h @@ -70,8 +70,6 @@ NS_ASSUME_NONNULL_BEGIN /// @param error report to the caller if any error happened creating the camera. - (instancetype)initWithConfiguration:(FLTCamConfiguration *)configuration error:(NSError **)error; -- (void)startImageStreamWithMessenger:(NSObject *)messenger - completion:(nonnull void (^)(FlutterError *_Nullable))completion; - (void)setUpCaptureSessionForAudioIfNeeded; // Methods exposed for the Swift DefaultCamera subclass diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam_Test.h index 2e9ab4aafb5..a604a22fd39 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam_Test.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam_Test.h @@ -31,9 +31,4 @@ @property(readonly, nonatomic) NSMutableDictionary *inProgressSavePhotoDelegates; -/// Start streaming images. 
-- (void)startImageStreamWithMessenger:(NSObject *)messenger - imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler - completion:(void (^)(FlutterError *))completion; - @end diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml index 372b322ef2f..ba90f6ab57c 100644 --- a/packages/camera/camera_avfoundation/pubspec.yaml +++ b/packages/camera/camera_avfoundation/pubspec.yaml @@ -2,7 +2,7 @@ name: camera_avfoundation description: iOS implementation of the camera plugin. repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22 -version: 0.9.21 +version: 0.9.21+1 environment: sdk: ^3.6.0 From b1e613d3933c70736978fbbd9f56fd0b36c9da66 Mon Sep 17 00:00:00 2001 From: Robert Odrowaz Date: Tue, 29 Jul 2025 10:14:47 +0200 Subject: [PATCH 2/3] Migrate startImageStream and setUpCaptureSessionForAudioIfNeeded methods to Swift --- .../camera/camera_avfoundation/CHANGELOG.md | 3 +- .../camera_avfoundation/DefaultCamera.swift | 89 +++++++++++++++++ .../Sources/camera_avfoundation_objc/FLTCam.m | 96 +------------------ .../include/camera_avfoundation/FLTCam.h | 5 +- 4 files changed, 95 insertions(+), 98 deletions(-) diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md index c4d10e4190a..c37b5a7f628 100644 --- a/packages/camera/camera_avfoundation/CHANGELOG.md +++ b/packages/camera/camera_avfoundation/CHANGELOG.md @@ -1,6 +1,7 @@ ## 0.9.21+1 -* Migrates `startImageStream` method to Swift. +* Migrates `startImageStream` and `setUpCaptureSessionForAudioIfNeeded` methods to Swift. +* Removes Objective-C implementation of `reportErrorMessage` method. 
## 0.9.21 diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift index 7a2a3f00093..69408765cf5 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift @@ -92,6 +92,92 @@ final class DefaultCamera: FLTCam, Camera { return (captureVideoInput, captureVideoOutput, connection) } + func setUpCaptureSessionForAudioIfNeeded() { + // Don't setup audio twice or we will lose the audio. + guard !mediaSettings.enableAudio || !isAudioSetup else { return } + + let audioDevice = audioCaptureDeviceFactory() + do { + // Create a device input with the device and add it to the session. + // Setup the audio input. + let audioInput = try captureDeviceInputFactory.deviceInput(with: audioDevice) + + // Setup the audio output. + let audioOutput = AVCaptureAudioDataOutput() + + let block = { + // Set up options implicit to AVAudioSessionCategoryPlayback to avoid conflicts with other + // plugins like video_player. 
+ DefaultCamera.upgradeAudioSessionCategory( + requestedCategory: .playAndRecord, + options: [.defaultToSpeaker, .allowBluetoothA2DP, .allowAirPlay] + ) + } + + if !Thread.isMainThread { + DispatchQueue.main.sync(execute: block) + } else { + block() + } + + if audioCaptureSession.canAddInput(audioInput) { + audioCaptureSession.addInput(audioInput) + + if audioCaptureSession.canAddOutput(audioOutput) { + audioCaptureSession.addOutput(audioOutput) + audioOutput.setSampleBufferDelegate(self, queue: captureSessionQueue) + isAudioSetup = true + } else { + reportErrorMessage("Unable to add Audio input/output to session capture") + isAudioSetup = false + } + } + } catch let error as NSError { + reportErrorMessage(error.description) + } + } + + // This function, although slightly modified, is also in video_player_avfoundation (in ObjC). + // Both need to do the same thing and run on the same thread (for example main thread). + // Configure application wide audio session manually to prevent overwriting flag + // MixWithOthers by capture session. + // Only change category if it is considered an upgrade which means it can only enable + // ability to play in silent mode or ability to record audio but never disables it, + // that could affect other plugins which depend on this global state. Only change + // category or options if there is change to prevent unnecessary lags and silence. 
+ private static func upgradeAudioSessionCategory( + requestedCategory: AVAudioSession.Category, + options: AVAudioSession.CategoryOptions + ) { + let playCategories: Set = [.playback, .playAndRecord] + let recordCategories: Set = [.record, .playAndRecord] + let requiredCategories: Set = [ + requestedCategory, AVAudioSession.sharedInstance().category, + ] + + let requiresPlay = !requiredCategories.isDisjoint(with: playCategories) + let requiresRecord = !requiredCategories.isDisjoint(with: recordCategories) + + var finalCategory = requestedCategory + if requiresPlay && requiresRecord { + finalCategory = .playAndRecord + } else if requiresPlay { + finalCategory = .playback + } else if requiresRecord { + finalCategory = .record + } + + let finalOptions = AVAudioSession.sharedInstance().categoryOptions.union(options) + + if finalCategory == AVAudioSession.sharedInstance().category + && finalOptions == AVAudioSession.sharedInstance().categoryOptions + { + return + } + + try? AVAudioSession.sharedInstance().setCategory(finalCategory, options: finalOptions) + } + func reportInitializationState() { // Get all the state on the current thread, not the main thread. let state = FCPPlatformCameraState.make( @@ -1036,6 +1122,9 @@ final class DefaultCamera: FLTCam, Camera { } } + /// Reports the given error message to the Dart side of the plugin. + /// + /// Can be called from any thread. 
private func reportErrorMessage(_ errorMessage: String) { FLTEnsureToRunOnMainQueue { [weak self] in self?.dartAPI?.reportError(errorMessage) { _ in diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m index 099aaea4d0a..62c5eae2829 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/FLTCam.m @@ -28,17 +28,11 @@ @interface FLTCam () *assetWriterPixelBufferAdaptor; @property(strong, nonatomic) AVCaptureVideoDataOutput *videoOutput; -@property(assign, nonatomic) BOOL isAudioSetup; /// A wrapper for CMVideoFormatDescriptionGetDimensions. /// Allows for alternate implementations in tests. @property(nonatomic, copy) VideoDimensionsForFormat videoDimensionsForFormat; -/// A wrapper for AVCaptureDevice creation to allow for dependency injection in tests. -@property(nonatomic, copy) AudioCaptureDeviceFactory audioCaptureDeviceFactory; -/// Reports the given error message to the Dart side of the plugin. -/// -/// Can be called from any thread. -- (void)reportErrorMessage:(NSString *)errorMessage; + @end @implementation FLTCam @@ -308,92 +302,4 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset return bestFormat; } -// This function, although slightly modified, is also in video_player_avfoundation. -// Both need to do the same thing and run on the same thread (for example main thread). -// Configure application wide audio session manually to prevent overwriting flag -// MixWithOthers by capture session. 
-// Only change category if it is considered an upgrade which means it can only enable -// ability to play in silent mode or ability to record audio but never disables it, -// that could affect other plugins which depend on this global state. Only change -// category or options if there is change to prevent unnecessary lags and silence. -static void upgradeAudioSessionCategory(AVAudioSessionCategory requestedCategory, - AVAudioSessionCategoryOptions options) { - NSSet *playCategories = [NSSet - setWithObjects:AVAudioSessionCategoryPlayback, AVAudioSessionCategoryPlayAndRecord, nil]; - NSSet *recordCategories = - [NSSet setWithObjects:AVAudioSessionCategoryRecord, AVAudioSessionCategoryPlayAndRecord, nil]; - NSSet *requiredCategories = - [NSSet setWithObjects:requestedCategory, AVAudioSession.sharedInstance.category, nil]; - BOOL requiresPlay = [requiredCategories intersectsSet:playCategories]; - BOOL requiresRecord = [requiredCategories intersectsSet:recordCategories]; - if (requiresPlay && requiresRecord) { - requestedCategory = AVAudioSessionCategoryPlayAndRecord; - } else if (requiresPlay) { - requestedCategory = AVAudioSessionCategoryPlayback; - } else if (requiresRecord) { - requestedCategory = AVAudioSessionCategoryRecord; - } - options = AVAudioSession.sharedInstance.categoryOptions | options; - if ([requestedCategory isEqualToString:AVAudioSession.sharedInstance.category] && - options == AVAudioSession.sharedInstance.categoryOptions) { - return; - } - [AVAudioSession.sharedInstance setCategory:requestedCategory withOptions:options error:nil]; -} - -- (void)setUpCaptureSessionForAudioIfNeeded { - // Don't setup audio twice or we will lose the audio. - if (!_mediaSettings.enableAudio || _isAudioSetup) { - return; - } - - NSError *error = nil; - // Create a device input with the device and add it to the session. - // Setup the audio input. 
- NSObject *audioDevice = self.audioCaptureDeviceFactory(); - NSObject *audioInput = - [_captureDeviceInputFactory deviceInputWithDevice:audioDevice error:&error]; - if (error) { - [self reportErrorMessage:error.description]; - } - // Setup the audio output. - _audioOutput = [[AVCaptureAudioDataOutput alloc] init]; - - dispatch_block_t block = ^{ - // Set up options implicit to AVAudioSessionCategoryPlayback to avoid conflicts with other - // plugins like video_player. - upgradeAudioSessionCategory(AVAudioSessionCategoryPlayAndRecord, - AVAudioSessionCategoryOptionDefaultToSpeaker | - AVAudioSessionCategoryOptionAllowBluetoothA2DP | - AVAudioSessionCategoryOptionAllowAirPlay); - }; - if (!NSThread.isMainThread) { - dispatch_sync(dispatch_get_main_queue(), block); - } else { - block(); - } - - if ([_audioCaptureSession canAddInput:audioInput]) { - [_audioCaptureSession addInput:audioInput]; - - if ([_audioCaptureSession canAddOutput:_audioOutput]) { - [_audioCaptureSession addOutput:_audioOutput]; - _isAudioSetup = YES; - } else { - [self reportErrorMessage:@"Unable to add Audio input/output to session capture"]; - _isAudioSetup = NO; - } - } -} - -- (void)reportErrorMessage:(NSString *)errorMessage { - __weak typeof(self) weakSelf = self; - FLTEnsureToRunOnMainQueue(^{ - [weakSelf.dartAPI reportError:errorMessage - completion:^(FlutterError *error){ - // Ignore any errors, as this is just an event broadcast. 
- }]; - }); -} - @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h index 2a0a19b2f81..37650f62a53 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h @@ -65,13 +65,14 @@ NS_ASSUME_NONNULL_BEGIN @property(readonly, nonatomic) FCPPlatformMediaSettings *mediaSettings; @property(nonatomic, copy) InputPixelBufferAdaptorFactory inputPixelBufferAdaptorFactory; @property(strong, nonatomic) AVCaptureAudioDataOutput *audioOutput; +@property(assign, nonatomic) BOOL isAudioSetup; +/// A wrapper for AVCaptureDevice creation to allow for dependency injection in tests. +@property(nonatomic, copy) AudioCaptureDeviceFactory audioCaptureDeviceFactory; /// Initializes an `FLTCam` instance with the given configuration. /// @param error report to the caller if any error happened creating the camera. 
- (instancetype)initWithConfiguration:(FLTCamConfiguration *)configuration error:(NSError **)error; -- (void)setUpCaptureSessionForAudioIfNeeded; - // Methods exposed for the Swift DefaultCamera subclass - (void)updateOrientation; From 71d55a40915fc61e488c53979fe8931d6d259b7b Mon Sep 17 00:00:00 2001 From: Robert Odrowaz Date: Thu, 31 Jul 2025 08:27:16 +0200 Subject: [PATCH 3/3] Remove redundant audioOutput field and setSampleBufferDelegate call --- .../Sources/camera_avfoundation/DefaultCamera.swift | 1 - .../include/camera_avfoundation/FLTCam.h | 1 - 2 files changed, 2 deletions(-) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift index 69408765cf5..022ceb2367e 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/DefaultCamera.swift @@ -343,7 +343,6 @@ final class DefaultCamera: FLTCam, Camera { newAudioWriterInput.expectsMediaDataInRealTime = true mediaSettingsAVWrapper.addInput(newAudioWriterInput, to: videoWriter) self.audioWriterInput = newAudioWriterInput - audioOutput.setSampleBufferDelegate(self, queue: captureSessionQueue) } if flashMode == .torch { diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h index 37650f62a53..6cb0fdc4de0 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h +++ 
b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation_objc/include/camera_avfoundation/FLTCam.h @@ -64,7 +64,6 @@ NS_ASSUME_NONNULL_BEGIN @property(readonly, nonatomic) FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper; @property(readonly, nonatomic) FCPPlatformMediaSettings *mediaSettings; @property(nonatomic, copy) InputPixelBufferAdaptorFactory inputPixelBufferAdaptorFactory; -@property(strong, nonatomic) AVCaptureAudioDataOutput *audioOutput; @property(assign, nonatomic) BOOL isAudioSetup; /// A wrapper for AVCaptureDevice creation to allow for dependency injection in tests. @property(nonatomic, copy) AudioCaptureDeviceFactory audioCaptureDeviceFactory;