
Commit 69f36e6

[iOS] Robustness for video renderer. (flutter-webrtc#1751)
* Robustness for video renderer.
* update.
1 parent a7798b0 commit 69f36e6

6 files changed: +69 −35 lines

common/darwin/Classes/FlutterRTCMediaStream.m

Lines changed: 25 additions & 22 deletions
@@ -65,35 +65,38 @@ - (RTCMediaConstraints*)defaultMediaStreamConstraints {
 
 - (NSArray<AVCaptureDevice*> *) captureDevices {
-  NSArray<AVCaptureDeviceType> *deviceTypes = @[
+  if (@available(iOS 13.0, macOS 10.15, macCatalyst 14.0, tvOS 17.0, *)) {
+    NSArray<AVCaptureDeviceType> *deviceTypes = @[
 #if TARGET_OS_IPHONE
-    AVCaptureDeviceTypeBuiltInTripleCamera,
-    AVCaptureDeviceTypeBuiltInDualCamera,
-    AVCaptureDeviceTypeBuiltInDualWideCamera,
-    AVCaptureDeviceTypeBuiltInWideAngleCamera,
-    AVCaptureDeviceTypeBuiltInTelephotoCamera,
-    AVCaptureDeviceTypeBuiltInUltraWideCamera,
+      AVCaptureDeviceTypeBuiltInTripleCamera,
+      AVCaptureDeviceTypeBuiltInDualCamera,
+      AVCaptureDeviceTypeBuiltInDualWideCamera,
+      AVCaptureDeviceTypeBuiltInWideAngleCamera,
+      AVCaptureDeviceTypeBuiltInTelephotoCamera,
+      AVCaptureDeviceTypeBuiltInUltraWideCamera,
 #else
-    AVCaptureDeviceTypeBuiltInWideAngleCamera,
+      AVCaptureDeviceTypeBuiltInWideAngleCamera,
 #endif
-  ];
-
+    ];
+
 #if !defined(TARGET_OS_IPHONE)
-  if (@available(macOS 13.0, *)) {
-    deviceTypes = [deviceTypes arrayByAddingObject:AVCaptureDeviceTypeDeskViewCamera];
-  }
+    if (@available(macOS 13.0, *)) {
+      deviceTypes = [deviceTypes arrayByAddingObject:AVCaptureDeviceTypeDeskViewCamera];
+    }
 #endif
 
-  if (@available(iOS 17.0, macOS 14.0, tvOS 17.0, *)) {
-    deviceTypes = [deviceTypes arrayByAddingObjectsFromArray: @[
-      AVCaptureDeviceTypeContinuityCamera,
-      AVCaptureDeviceTypeExternal,
-    ]];
-  }
+    if (@available(iOS 17.0, macOS 14.0, tvOS 17.0, *)) {
+      deviceTypes = [deviceTypes arrayByAddingObjectsFromArray: @[
+        AVCaptureDeviceTypeContinuityCamera,
+        AVCaptureDeviceTypeExternal,
+      ]];
+    }
 
-  return [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes
-                                                                 mediaType:AVMediaTypeVideo
-                                                                  position:AVCaptureDevicePositionUnspecified].devices;
+    return [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes
+                                                                   mediaType:AVMediaTypeVideo
+                                                                    position:AVCaptureDevicePositionUnspecified].devices;
+  }
+  return @[];
 }
 
 /**

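The hunk above puts the whole discovery call behind a single @available check and returns an empty array on older OS versions instead of passing unavailable device types to AVCaptureDeviceDiscoverySession. Callers therefore need to tolerate an empty result; a minimal sketch of such a call site (illustrative only, not code from this commit) follows.

// Hypothetical call site inside the same class, shown only to illustrate
// that captureDevices may now legitimately return an empty array.
NSArray<AVCaptureDevice*> *devices = [self captureDevices];
if (devices.count == 0) {
  // Older OS version (or no cameras): report an empty device list
  // rather than assuming at least one capture device exists.
}
for (AVCaptureDevice *device in devices) {
  NSLog(@"camera: %@ (%@)", device.localizedName, device.uniqueID);
}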
common/darwin/Classes/FlutterRTCVideoRenderer.m

Lines changed: 8 additions & 2 deletions
@@ -25,6 +25,7 @@ @implementation FlutterRTCVideoRenderer {
 @synthesize textureId = _textureId;
 @synthesize registry = _registry;
 @synthesize eventSink = _eventSink;
+@synthesize videoTrack = _videoTrack;
 
 - (instancetype)initWithTextureRegistry:(id<FlutterTextureRegistry>)registry
                                messenger:(NSObject<FlutterBinaryMessenger>*)messenger {
@@ -75,13 +76,14 @@ - (void)dispose {
 
 - (void)setVideoTrack:(RTCVideoTrack*)videoTrack {
   RTCVideoTrack* oldValue = self.videoTrack;
-
   if (oldValue != videoTrack) {
+    os_unfair_lock_lock(&_lock);
+    _videoTrack = videoTrack;
+    os_unfair_lock_unlock(&_lock);
     _isFirstFrameRendered = false;
     if (oldValue) {
       [oldValue removeRenderer:self];
     }
-    _videoTrack = videoTrack;
     _frameSize = CGSizeZero;
     _renderSize = CGSizeZero;
     _rotation = -1;
@@ -192,6 +194,10 @@ - (void)copyI420ToCVPixelBuffer:(CVPixelBufferRef)outputPixelBuffer
 - (void)renderFrame:(RTCVideoFrame*)frame {
 
   os_unfair_lock_lock(&_lock);
+  if(_videoTrack == nil) {
+    os_unfair_lock_unlock(&_lock);
+    return;
+  }
   if(!_frameAvailable && _pixelBufferRef) {
     [self copyI420ToCVPixelBuffer:_pixelBufferRef withFrame:frame];
     if(_textureId != -1) {

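With this change the track pointer is written under the same os_unfair_lock that renderFrame: takes, and renderFrame: drops frames once the track has been cleared, so a frame arriving after the track is detached can no longer touch stale renderer state. A compact sketch of the pattern (names mirror the diff; this is an illustration, not the full renderer implementation):

#import <os/lock.h>

// Assumed ivars, as in the renderer:
//   os_unfair_lock _lock;        // initialized with OS_UNFAIR_LOCK_INIT
//   RTCVideoTrack* _videoTrack;  // written by setVideoTrack: under _lock

- (void)renderFrame:(RTCVideoFrame*)frame {
  os_unfair_lock_lock(&_lock);
  if (_videoTrack == nil) {
    // Track was detached (e.g. by removeTrack/removeStream); skip this frame.
    os_unfair_lock_unlock(&_lock);
    return;
  }
  // ... copy the frame into the pixel buffer while still holding the lock ...
  os_unfair_lock_unlock(&_lock);
}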
common/darwin/Classes/FlutterWebRTCPlugin.h

Lines changed: 2 additions & 2 deletions
@@ -49,8 +49,8 @@ typedef void (^CapturerStopHandler)(CompletionHandler _Nonnull handler);
 @property(nonatomic, strong) FlutterRTCFrameCapturer* _Nullable frameCapturer;
 @property(nonatomic, strong) AVAudioSessionPort _Nullable preferredInput;
 
-@property(nonatomic, strong) NSString *focusMode;
-@property(nonatomic, strong) NSString *exposureMode;
+@property(nonatomic, strong) NSString * _Nonnull focusMode;
+@property(nonatomic, strong) NSString * _Nonnull exposureMode;
 
 @property(nonatomic) BOOL _usingFrontCamera;
 @property(nonatomic) NSInteger _lastTargetWidth;

common/darwin/Classes/FlutterWebRTCPlugin.m

Lines changed: 26 additions & 5 deletions
@@ -585,6 +585,10 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
       for (RTCVideoTrack* track in stream.videoTracks) {
         [_localTracks removeObjectForKey:track.trackId];
         RTCVideoTrack* videoTrack = (RTCVideoTrack*)track;
+        FlutterRTCVideoRenderer *renderer = [self findRendererByTrackId:videoTrack.trackId];
+        if(renderer != nil) {
+          renderer.videoTrack = nil;
+        }
         CapturerStopHandler stopHandler = self.videoCapturerStopHandlers[videoTrack.trackId];
         if (stopHandler) {
           shouldCallResult = NO;
@@ -699,6 +703,10 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
       if (audioTrack) {
         [self ensureAudioSession];
       }
+      FlutterRTCVideoRenderer *renderer = [self findRendererByTrackId:trackId];
+      if(renderer != nil) {
+        renderer.videoTrack = nil;
+      }
       result(nil);
     } else if ([@"restartIce" isEqualToString:call.method]) {
       NSDictionary* argsMap = call.arguments;
@@ -746,9 +754,11 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
      NSDictionary* argsMap = call.arguments;
      NSNumber* textureId = argsMap[@"textureId"];
      FlutterRTCVideoRenderer* render = self.renders[textureId];
-     render.videoTrack = nil;
-     [render dispose];
-     [self.renders removeObjectForKey:textureId];
+     if(render != nil) {
+       render.videoTrack = nil;
+       [render dispose];
+       [self.renders removeObjectForKey:textureId];
+     }
      result(nil);
    } else if ([@"videoRendererSetSrcObject" isEqualToString:call.method]) {
      NSDictionary* argsMap = call.arguments;
@@ -826,8 +836,10 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
      NSDictionary* argsMap = call.arguments;
      NSNumber* viewId = argsMap[@"viewId"];
      FlutterRTCVideoPlatformViewController* render = _platformViewFactory.renders[viewId];
-     render.videoTrack = nil;
-     [_platformViewFactory.renders removeObjectForKey:viewId];
+     if(render != nil) {
+       render.videoTrack = nil;
+       [_platformViewFactory.renders removeObjectForKey:viewId];
+     }
      result(nil);
    }
 #endif
@@ -2283,4 +2295,13 @@ - (NSDictionary*)transceiverToMap:(RTCRtpTransceiver*)transceiver {
   };
   return params;
 }
+
+- (FlutterRTCVideoRenderer *)findRendererByTrackId:(NSString *)trackId {
+  for (FlutterRTCVideoRenderer *renderer in self.renders.allValues) {
+    if (renderer.videoTrack != nil && [renderer.videoTrack.trackId isEqualToString:trackId]) {
+      return renderer;
+    }
+  }
+  return nil;
+}
 @end

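The new findRendererByTrackId: helper does a linear scan over self.renders.allValues and is called from the removeStream and removeTrack paths above to clear any renderer that still points at the track being torn down. A hedged sketch of that flow in one place (detachVideoTrackWithId: is a hypothetical name, not a method in this commit):

// Hypothetical wrapper illustrating the order used at the real call sites:
// clear the renderer first so renderFrame: starts dropping frames, then the
// capturer/track can be stopped safely.
- (void)detachVideoTrackWithId:(NSString *)trackId {
  FlutterRTCVideoRenderer *renderer = [self findRendererByTrackId:trackId];
  if (renderer != nil) {
    renderer.videoTrack = nil;
  }
}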
ios/Classes/FlutterRTCVideoPlatformView.h

Lines changed: 1 addition & 1 deletion
@@ -10,7 +10,7 @@
 
 - (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
 
-- (instancetype)initWithFrame:(CGRect)frame;
+- (instancetype _Nonnull)initWithFrame:(CGRect)frame;
 
 - (void)setSize:(CGSize)size;
 

ios/Classes/FlutterRTCVideoPlatformView.m

Lines changed: 7 additions & 3 deletions
@@ -49,9 +49,13 @@ - (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
 
   CMSampleBufferRef sampleBuffer = [self sampleBufferFromPixelBuffer:pixelBuffer];
   if (sampleBuffer) {
-    if([_videoLayer requiresFlushToResumeDecoding]) {
-      [_videoLayer flushAndRemoveImage];
-    }
+    if (@available(iOS 14.0, *)) {
+      if([_videoLayer requiresFlushToResumeDecoding]) {
+        [_videoLayer flushAndRemoveImage];
+      }
+    } else {
+      // Fallback on earlier versions
+    }
     [_videoLayer enqueueSampleBuffer:sampleBuffer];
     CFRelease(sampleBuffer);
   }

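requiresFlushToResumeDecoding on AVSampleBufferDisplayLayer is only available from iOS 14, which is why the call is now wrapped in @available(iOS 14.0, *). The commit simply skips the check on earlier versions; one possible fallback (an assumption, not part of this commit) is to flush when the layer reports a failed rendering status:

if (_videoLayer.status == AVQueuedSampleBufferRenderingStatusFailed) {
  // Pre-iOS 14 assumed fallback (not in this commit): no
  // requiresFlushToResumeDecoding, so recover when rendering has failed.
  [_videoLayer flushAndRemoveImage];
}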