Merged
enable more than 30 fps
misos1 committed Aug 14, 2024
commit 6c769f9a535f3941b90c695ddbdfcb238cbbee06
6 changes: 5 additions & 1 deletion packages/camera/camera_avfoundation/CHANGELOG.md
@@ -1,6 +1,10 @@
## 0.9.17+2

* Adds possibility to use any supported FPS and fixes crash when using unsupported FPS.

## 0.9.17+1

* Fixes a crash due to appending sample buffers when readyForMoreMediaData is NO
* Fixes a crash due to appending sample buffers when readyForMoreMediaData is NO.

## 0.9.17

@@ -148,9 +148,9 @@ - (void)testSettings_shouldPassConfigurationToCameraDeviceAndWriter {

// Expect FPS configuration is passed to camera device.
[self waitForExpectations:@[
injectedWrapper.lockExpectation, injectedWrapper.beginConfigurationExpectation,
injectedWrapper.beginConfigurationExpectation, injectedWrapper.lockExpectation,
injectedWrapper.minFrameDurationExpectation, injectedWrapper.maxFrameDurationExpectation,
injectedWrapper.commitConfigurationExpectation, injectedWrapper.unlockExpectation
injectedWrapper.unlockExpectation, injectedWrapper.commitConfigurationExpectation
]
timeout:1
enforceOrder:YES];
@@ -202,4 +202,20 @@ - (void)testSettings_ShouldBeSupportedByMethodCall {
XCTAssertNotNil(resultValue);
}

- (void)testSettings_ShouldSelectFormatWhichSupports60FPS {
FCPPlatformMediaSettings *settings =
[FCPPlatformMediaSettings makeWithResolutionPreset:gTestResolutionPreset
framesPerSecond:@(60)
videoBitrate:@(gTestVideoBitrate)
audioBitrate:@(gTestAudioBitrate)
enableAudio:gTestEnableAudio];

FLTCam *camera = FLTCreateCamWithCaptureSessionQueueAndMediaSettings(
dispatch_queue_create("test", NULL), settings, nil, nil);

AVFrameRateRange *range = camera.captureDevice.activeFormat.videoSupportedFrameRateRanges[0];
XCTAssertLessThanOrEqual(range.minFrameRate, 60);
XCTAssertGreaterThanOrEqual(range.maxFrameRate, 60);
}

@end
@@ -52,14 +52,44 @@
OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]);
OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);

id frameRateRangeMock1 = OCMClassMock([AVFrameRateRange class]);
OCMStub([frameRateRangeMock1 minFrameRate]).andReturn(3);
OCMStub([frameRateRangeMock1 maxFrameRate]).andReturn(30);
id captureDeviceFormatMock1 = OCMClassMock([AVCaptureDeviceFormat class]);
OCMStub([captureDeviceFormatMock1 videoSupportedFrameRateRanges]).andReturn(@[
frameRateRangeMock1
]);

id frameRateRangeMock2 = OCMClassMock([AVFrameRateRange class]);
OCMStub([frameRateRangeMock2 minFrameRate]).andReturn(3);
OCMStub([frameRateRangeMock2 maxFrameRate]).andReturn(60);
id captureDeviceFormatMock2 = OCMClassMock([AVCaptureDeviceFormat class]);
OCMStub([captureDeviceFormatMock2 videoSupportedFrameRateRanges]).andReturn(@[
frameRateRangeMock2
]);

id captureDeviceMock = OCMClassMock([AVCaptureDevice class]);
OCMStub([captureDeviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
OCMStub([captureDeviceMock formats]).andReturn((@[
captureDeviceFormatMock1, captureDeviceFormatMock2
]));
__block AVCaptureDeviceFormat *format = captureDeviceFormatMock1;
OCMStub([captureDeviceMock setActiveFormat:[OCMArg any]]).andDo(^(NSInvocation *invocation) {
[invocation retainArguments];
[invocation getArgument:&format atIndex:2];
});
OCMStub([captureDeviceMock activeFormat]).andDo(^(NSInvocation *invocation) {
[invocation setReturnValue:&format];
});
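A quick illustration of what the NSInvocation-based stubs above provide (a hypothetical check, not part of the test file): the mock now behaves like a real read/write activeFormat property, with retainArguments keeping the assigned format alive for the duration of the test.

  // Hypothetical usage of the stubbed mock, sketched for clarity.
  XCTAssertEqual([captureDeviceMock activeFormat], captureDeviceFormatMock1);
  [captureDeviceMock setActiveFormat:captureDeviceFormatMock2];
  XCTAssertEqual([captureDeviceMock activeFormat], captureDeviceFormatMock2);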

id fltCam = [[FLTCam alloc] initWithMediaSettings:mediaSettings
mediaSettingsAVWrapper:mediaSettingsAVWrapper
orientation:UIDeviceOrientationPortrait
videoCaptureSession:videoSessionMock
audioCaptureSession:audioSessionMock
captureSessionQueue:captureSessionQueue
captureDeviceFactory:captureDeviceFactory ?: ^AVCaptureDevice *(void) {
return [AVCaptureDevice deviceWithUniqueID:@"camera"];
return captureDeviceMock;
}
videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) {
return CMVideoFormatDescriptionGetDimensions(format.formatDescription);
@@ -210,20 +210,50 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings
[_motionManager startAccelerometerUpdates];

if (_mediaSettings.framesPerSecond) {
[_mediaSettingsAVWrapper beginConfigurationForSession:_videoCaptureSession];

// Possible values for presets are hard-coded in FLT interface having
Collaborator:

Why is all this code moving out of the locked section; is that safe?

Contributor Author (@misos1, Sep 4, 2024):

I did not see anything saying that the order of beginConfiguration and lockForConfiguration is important, or that setting the session preset must happen under that lock. There is an example where it is used in this order: https://developer.apple.com/documentation/avfoundation/avcapturedevice/1389221-activeformat?language=objc

I moved setCaptureSessionPreset outside of the lock because it can itself call lockForConfiguration and unlockForConfiguration, and I did not see anything about whether that lock is recursive.

Contributor Author:

From my observation, lockForConfiguration actually seems to be recursive (I could not reproduce the mentioned exceptions so far): locking twice and unlocking once does not cause an exception, but unlocking twice does. So I can revert this change (?).

Also, it seems not all commitConfiguration calls go through _mediaSettingsAVWrapper.

// corresponding AVCaptureSessionPreset counterparts.
// If _resolutionPreset is not supported by camera there is
// fallback to lower resolution presets.
// If none can be selected there is error condition.
if (![self setCaptureSessionPreset:_mediaSettings.resolutionPreset withError:error]) {
[_videoCaptureSession commitConfiguration];
return nil;
}
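For reference, the activeFormat documentation linked in the thread above keeps all device mutations inside a single lockForConfiguration:/unlockForConfiguration: pair, while the session's beginConfiguration/commitConfiguration bracket is a separate mechanism from the device lock. A minimal standalone sketch of that documented pattern, assuming a 60 FPS target and no wrapper types:

  NSError *lockError = nil;
  if ([captureDevice lockForConfiguration:&lockError]) {
    // All mutations of the capture device happen while it is locked.
    captureDevice.activeFormat = bestFormat;
    captureDevice.activeVideoMinFrameDuration = CMTimeMake(1, 60);
    captureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, 60);
    [captureDevice unlockForConfiguration];
  } else {
    NSLog(@"Could not lock capture device: %@", lockError);
  }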

// The frame rate can be changed only on a locked for configuration device.
if ([mediaSettingsAVWrapper lockDevice:_captureDevice error:error]) {
[_mediaSettingsAVWrapper beginConfigurationForSession:_videoCaptureSession];

// Possible values for presets are hard-coded in FLT interface having
// corresponding AVCaptureSessionPreset counterparts.
// If _resolutionPreset is not supported by camera there is
// fallback to lower resolution presets.
// If none can be selected there is error condition.
if (![self setCaptureSessionPreset:_mediaSettings.resolutionPreset withError:error]) {
[_videoCaptureSession commitConfiguration];
[_captureDevice unlockForConfiguration];
return nil;
// find the format which frame rate ranges are closest to the wanted frame rate
Collaborator:

Comments need to follow standard sentence rules. https://google.github.io/styleguide/objcguide.html#comments

CMVideoDimensions targetRes = self.videoDimensionsForFormat(_captureDevice.activeFormat);
Collaborator:

You've added several calls to self in this init method. The fact that there was already such a call is a problem, and that should not be made worse by adding more.

If there are general, stateless utilities that you want to call from init, they should be turned into freestanding static functions.

Collaborator:

Please don't abbreviate variable names. https://google.github.io/styleguide/objcguide.html#naming

double targetFrameRate = _mediaSettings.framesPerSecond.doubleValue;
FourCharCode preferredSubType =
CMFormatDescriptionGetMediaSubType(_captureDevice.activeFormat.formatDescription);
AVCaptureDeviceFormat *bestFormat = _captureDevice.activeFormat;
double bestFrameRate = [self frameRateForFormat:bestFormat closestTo:targetFrameRate];
double minDistance = fabs(bestFrameRate - targetFrameRate);
int bestSubTypeScore = 1;
for (AVCaptureDeviceFormat *format in _captureDevice.formats) {
Contributor:

can you also move this to a helper function?

CMVideoDimensions res = self.videoDimensionsForFormat(format);
if (res.width != targetRes.width || res.height != targetRes.height) {
continue;
}
double frameRate = [self frameRateForFormat:format closestTo:targetFrameRate];
double distance = fabs(frameRate - targetFrameRate);
FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
int subTypeScore = subType == preferredSubType ? 1 : 0;
if (distance < minDistance ||
(distance == minDistance && subTypeScore > bestSubTypeScore)) {
Collaborator:

I'm confused by this condition. bestSubTypeScore is initialized to 1, and subTypeScore is always either 0 or 1. How could this last piece of the condition ever be true?

I'm not clear on what this score even is; this whole algorithm needs some explanatory comment at the beginning.

Contributor Author (@misos1, Aug 30, 2024):

If distance < minDistance, then bestSubTypeScore can be overwritten by 0. This line explains it: subTypeScore = subType == preferredSubType ? 1 : 0 — the score of a subtype is 1 when it is the preferred subtype and 0 otherwise. This is how to choose by minimum distance and then, among formats with equal distance, prefer the one with the preferred subtype; it gives the subtypes an order. Instead of subTypeScore > bestSubTypeScore there could be something like bestSubType != preferredSubType && subType == preferredSubType, but that makes it less clear, as it deviates from the simple way to order by multiple properties: current.A < other.A || (current.A == other.A && current.B < other.B).

Collaborator:

> but that makes it less clear

I strongly disagree that abstracting the thing that matters—whether the current type is a preferred type—behind a completely arbitrary numeric value makes this code easier to understand. The code you have there that you described as "less clear" makes sense to me when I read it, whereas I did not understand what this score was, or why something that could only have two possible values was being called a score in the first place.

bestFormat = format;
bestFrameRate = frameRate;
minDistance = distance;
bestSubTypeScore = subTypeScore;
}
}
if (![bestFormat isEqual:_captureDevice.activeFormat]) {
_captureDevice.activeFormat = bestFormat;
}
_mediaSettings.framesPerSecond = @(bestFrameRate);
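For comparison with the score discussion above, the reviewer's preferred form spells the tie-breaker out as a boolean test on the subtype instead of a numeric score. A sketch of how the loop body might read under that approach (bestSubType is a hypothetical variable replacing bestSubTypeScore):

  FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
  // Switch formats when strictly closer to the target frame rate, or, on an
  // exact tie, when the current best is not the preferred subtype but this
  // candidate is.
  BOOL betterSubType = bestSubType != preferredSubType && subType == preferredSubType;
  if (distance < minDistance || (distance == minDistance && betterSubType)) {
    bestFormat = format;
    bestFrameRate = frameRate;
    minDistance = distance;
    bestSubType = subType;
  }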

// Set frame rate with 1/10 precision allowing not integral values.
int fpsNominator = floor([_mediaSettings.framesPerSecond doubleValue] * 10.0);
@@ -232,9 +262,10 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings
[mediaSettingsAVWrapper setMinFrameDuration:duration onDevice:_captureDevice];
[mediaSettingsAVWrapper setMaxFrameDuration:duration onDevice:_captureDevice];

[_mediaSettingsAVWrapper commitConfigurationForSession:_videoCaptureSession];
[_mediaSettingsAVWrapper unlockDevice:_captureDevice];
[_mediaSettingsAVWrapper commitConfigurationForSession:_videoCaptureSession];
} else {
[_mediaSettingsAVWrapper commitConfigurationForSession:_videoCaptureSession];
return nil;
}
} else {
Expand All @@ -250,6 +281,20 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings
return self;
}

- (double)frameRateForFormat:(AVCaptureDeviceFormat *)format closestTo:(double)targetFrameRate {
double bestFrameRate = 0;
double minDistance = DBL_MAX;
for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
double frameRate = MIN(MAX(targetFrameRate, range.minFrameRate), range.maxFrameRate);
double distance = fabs(frameRate - targetFrameRate);
if (distance < minDistance) {
bestFrameRate = frameRate;
minDistance = distance;
}
}
return bestFrameRate;
}
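Combining the reviewer suggestions above (no calls to self from init, a named helper, unabbreviated variable names, and an explanatory comment up front), the selection logic could be factored into freestanding static functions along these lines. FLTBestFormatForFrameRate, FLTClosestFrameRateForFormat, and their signatures are hypothetical, not part of this commit:

  // Returns the frame rate supported by the format that is closest to
  // targetFrameRate, clamping the target into each supported range.
  static double FLTClosestFrameRateForFormat(AVCaptureDeviceFormat *format,
                                             double targetFrameRate) {
    double bestFrameRate = 0;
    double minDistance = DBL_MAX;
    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
      double frameRate = MIN(MAX(targetFrameRate, range.minFrameRate), range.maxFrameRate);
      double distance = fabs(frameRate - targetFrameRate);
      if (distance < minDistance) {
        bestFrameRate = frameRate;
        minDistance = distance;
      }
    }
    return bestFrameRate;
  }

  // Selects the format whose supported frame rates come closest to
  // targetFrameRate among formats with the currently active resolution,
  // breaking exact ties in favor of the currently active media subtype.
  static AVCaptureDeviceFormat *FLTBestFormatForFrameRate(
      AVCaptureDevice *captureDevice, double targetFrameRate,
      CMVideoDimensions (^videoDimensionsForFormat)(AVCaptureDeviceFormat *)) {
    CMVideoDimensions targetResolution =
        videoDimensionsForFormat(captureDevice.activeFormat);
    FourCharCode preferredSubType =
        CMFormatDescriptionGetMediaSubType(captureDevice.activeFormat.formatDescription);
    AVCaptureDeviceFormat *bestFormat = captureDevice.activeFormat;
    double minDistance =
        fabs(FLTClosestFrameRateForFormat(bestFormat, targetFrameRate) - targetFrameRate);
    for (AVCaptureDeviceFormat *format in captureDevice.formats) {
      CMVideoDimensions resolution = videoDimensionsForFormat(format);
      if (resolution.width != targetResolution.width ||
          resolution.height != targetResolution.height) {
        continue;
      }
      double frameRate = FLTClosestFrameRateForFormat(format, targetFrameRate);
      double distance = fabs(frameRate - targetFrameRate);
      FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
      FourCharCode bestSubType =
          CMFormatDescriptionGetMediaSubType(bestFormat.formatDescription);
      BOOL betterSubType = bestSubType != preferredSubType && subType == preferredSubType;
      if (distance < minDistance || (distance == minDistance && betterSubType)) {
        bestFormat = format;
        minDistance = distance;
      }
    }
    return bestFormat;
  }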

- (AVCaptureConnection *)createConnection:(NSError **)error {
// Setup video capture input.
_captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice error:error];
@@ -543,16 +588,23 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset
/// Finds the highest available resolution in terms of pixel count for the given device.
Contributor:

Nit: "... if same pixel count, use the subtype as the tie breaker."

- (AVCaptureDeviceFormat *)highestResolutionFormatForCaptureDevice:
(AVCaptureDevice *)captureDevice {
FourCharCode preferredSubType =
CMFormatDescriptionGetMediaSubType(_captureDevice.activeFormat.formatDescription);
AVCaptureDeviceFormat *bestFormat = nil;
NSUInteger maxPixelCount = 0;
int bestSubTypeScore = 0;
for (AVCaptureDeviceFormat *format in _captureDevice.formats) {
CMVideoDimensions res = self.videoDimensionsForFormat(format);
NSUInteger height = res.height;
NSUInteger width = res.width;
NSUInteger pixelCount = height * width;
if (pixelCount > maxPixelCount) {
maxPixelCount = pixelCount;
FourCharCode subType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
int subTypeScore = subType == preferredSubType ? 1 : 0;
if (pixelCount > maxPixelCount ||
(pixelCount == maxPixelCount && subTypeScore > bestSubTypeScore)) {
bestFormat = format;
maxPixelCount = pixelCount;
bestSubTypeScore = subTypeScore;
}
}
return bestFormat;
2 changes: 1 addition & 1 deletion packages/camera/camera_avfoundation/pubspec.yaml
@@ -2,7 +2,7 @@ name: camera_avfoundation
description: iOS implementation of the camera plugin.
repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
version: 0.9.17+1
version: 0.9.17+2

environment:
sdk: ^3.2.3
5 changes: 5 additions & 0 deletions packages/video_player/video_player_avfoundation/CHANGELOG.md
@@ -1,3 +1,8 @@
## 2.6.2

* Adds possibility to play videos at more than 30 FPS.
* Fixes playing state not updating in some paths.

## 2.6.1

* Adds files to make include directory permanent.
@@ -125,6 +125,7 @@ @interface StubFVPDisplayLinkFactory : NSObject <FVPDisplayLinkFactory>

/** This display link to return. */
@property(nonatomic, strong) FVPDisplayLink *displayLink;
@property(nonatomic) void (^fireDisplayLink)(void);

- (instancetype)initWithDisplayLink:(FVPDisplayLink *)displayLink;

@@ -138,6 +139,7 @@ - (instancetype)initWithDisplayLink:(FVPDisplayLink *)displayLink {
}
- (FVPDisplayLink *)displayLinkWithRegistrar:(id<FlutterPluginRegistrar>)registrar
callback:(void (^)(void))callback {
self.fireDisplayLink = callback;
return self.displayLink;
}

@@ -243,13 +245,14 @@ - (void)testSeekToWhilePausedStartsDisplayLinkTemporarily {
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andReturn(YES);
// Any non-zero value is fine here since it won't actually be used, just NULL-checked.
CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
CVPixelBufferRef bufferRef;
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
.ignoringNonObjectArgs()
.andReturn(fakeBufferRef);
.andReturn(bufferRef);
// Simulate a callback from the engine to request a new frame.
[player copyPixelBuffer];
stubDisplayLinkFactory.fireDisplayLink();
CFRelease([player copyPixelBuffer]);
// Since a frame was found, and the video is paused, the display link should be paused again.
OCMVerify([mockDisplayLink setRunning:NO]);
}
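Throughout these tests the (CVPixelBufferRef)1 placeholder is replaced with a real 1x1 buffer because the buffer returned by copyPixelBuffer is now handed to CFRelease, which would crash on a fake pointer. A minimal sketch of the ownership pairing, outside the OCMock plumbing:

  CVPixelBufferRef bufferRef = NULL;
  // Create a minimal 1x1 BGRA buffer to stand in for a decoded frame.
  CVReturn status = CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
  if (status == kCVReturnSuccess) {
    // copyPixelBuffer transfers ownership to the caller, so the caller releases.
    CFRelease(bufferRef);
  }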
@@ -294,14 +297,15 @@ - (void)testInitStartsDisplayLinkTemporarily {
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andReturn(YES);
// Any non-zero value is fine here since it won't actually be used, just NULL-checked.
CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
CVPixelBufferRef bufferRef;
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
.ignoringNonObjectArgs()
.andReturn(fakeBufferRef);
.andReturn(bufferRef);
// Simulate a callback from the engine to request a new frame.
FVPVideoPlayer *player = videoPlayerPlugin.playersByTextureId[textureId];
[player copyPixelBuffer];
stubDisplayLinkFactory.fireDisplayLink();
CFRelease([player copyPixelBuffer]);
// Since a frame was found, and the video is paused, the display link should be paused again.
OCMVerify([mockDisplayLink setRunning:NO]);
}
@@ -357,13 +361,14 @@ - (void)testSeekToWhilePlayingDoesNotStopDisplayLink {
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andReturn(YES);
// Any non-zero value is fine here since it won't actually be used, just NULL-checked.
CVPixelBufferRef fakeBufferRef = (CVPixelBufferRef)1;
CVPixelBufferRef bufferRef;
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero itemTimeForDisplay:NULL])
.ignoringNonObjectArgs()
.andReturn(fakeBufferRef);
.andReturn(bufferRef);
// Simulate a callback from the engine to request a new frame.
[player copyPixelBuffer];
stubDisplayLinkFactory.fireDisplayLink();
CFRelease([player copyPixelBuffer]);
// Since the video was playing, the display link should not be paused after getting a buffer.
OCMVerify(never(), [mockDisplayLink setRunning:NO]);
}
@@ -790,6 +795,82 @@ - (void)testPublishesInRegistration {
XCTAssertTrue([publishedValue isKindOfClass:[FVPVideoPlayerPlugin class]]);
}

- (void)testPlayerShouldNotDropEverySecondFrame {
NSObject<FlutterPluginRegistrar> *registrar =
[GetPluginRegistry() registrarForPlugin:@"testPlayerShouldNotDropEverySecondFrame"];
NSObject<FlutterPluginRegistrar> *partialRegistrar = OCMPartialMock(registrar);
NSObject<FlutterTextureRegistry> *mockTextureRegistry =
OCMProtocolMock(@protocol(FlutterTextureRegistry));
OCMStub([partialRegistrar textures]).andReturn(mockTextureRegistry);

FVPDisplayLink *displayLink = [[FVPDisplayLink alloc] initWithRegistrar:registrar
callback:^(){
}];
StubFVPDisplayLinkFactory *stubDisplayLinkFactory =
[[StubFVPDisplayLinkFactory alloc] initWithDisplayLink:displayLink];
AVPlayerItemVideoOutput *mockVideoOutput = OCMPartialMock([[AVPlayerItemVideoOutput alloc] init]);
FVPVideoPlayerPlugin *videoPlayerPlugin = [[FVPVideoPlayerPlugin alloc]
initWithAVFactory:[[StubFVPAVFactory alloc] initWithPlayer:nil output:mockVideoOutput]
displayLinkFactory:stubDisplayLinkFactory
registrar:partialRegistrar];

FlutterError *error;
[videoPlayerPlugin initialize:&error];
XCTAssertNil(error);
FVPCreationOptions *create = [FVPCreationOptions
makeWithAsset:nil
uri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4"
packageName:nil
formatHint:nil
httpHeaders:@{}];
NSNumber *textureId = [videoPlayerPlugin createWithOptions:create error:&error];
FVPVideoPlayer *player = videoPlayerPlugin.playersByTextureId[textureId];

__block CMTime currentTime = kCMTimeZero;
OCMStub([mockVideoOutput itemTimeForHostTime:0])
.ignoringNonObjectArgs()
.andDo(^(NSInvocation *invocation) {
[invocation setReturnValue:&currentTime];
});
__block NSMutableSet *pixelBuffers = NSMutableSet.new;
OCMStub([mockVideoOutput hasNewPixelBufferForItemTime:kCMTimeZero])
.ignoringNonObjectArgs()
.andDo(^(NSInvocation *invocation) {
CMTime itemTime;
[invocation getArgument:&itemTime atIndex:2];
BOOL has = [pixelBuffers containsObject:[NSValue valueWithCMTime:itemTime]];
[invocation setReturnValue:&has];
});
OCMStub([mockVideoOutput copyPixelBufferForItemTime:kCMTimeZero
itemTimeForDisplay:[OCMArg anyPointer]])
.ignoringNonObjectArgs()
.andDo(^(NSInvocation *invocation) {
CMTime itemTime;
[invocation getArgument:&itemTime atIndex:2];
CVPixelBufferRef bufferRef = NULL;
if ([pixelBuffers containsObject:[NSValue valueWithCMTime:itemTime]]) {
CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef);
}
[pixelBuffers removeObject:[NSValue valueWithCMTime:itemTime]];
[invocation setReturnValue:&bufferRef];
});
void (^advanceFrame)(void) = ^{
currentTime.value++;
[pixelBuffers addObject:[NSValue valueWithCMTime:currentTime]];
};

advanceFrame();
OCMExpect([mockTextureRegistry textureFrameAvailable:textureId.intValue]);
stubDisplayLinkFactory.fireDisplayLink();
OCMVerifyAllWithDelay(mockTextureRegistry, 10);

advanceFrame();
OCMExpect([mockTextureRegistry textureFrameAvailable:textureId.intValue]);
CFRelease([player copyPixelBuffer]);
stubDisplayLinkFactory.fireDisplayLink();
OCMVerifyAllWithDelay(mockTextureRegistry, 10);
}

#if TARGET_OS_IOS
- (void)validateTransformFixForOrientation:(UIImageOrientation)orientation {
AVAssetTrack *track = [[FakeAVAssetTrack alloc] initWithOrientation:orientation];