
Commit 9f73f5a

huulbaek (Park Sung Min) authored and committed
[camera] Pause/resume video recording for Android (flutter#1370)
* Pause/resume video recording for Android
* Specify type
* Add pausing and resuming to example app
* iOS side of pausing/resuming
* More documentation
* Version bump
* Add video pausing and resuming
* get pausing and recording to work for no audio
* It works
* Formatting
* Add test for pausing and resuming
* Call success outside try catch block
* formatting
* Disable audio in test and call result on iOS
1 parent 5f1d31e commit 9f73f5a

File tree

8 files changed, +331 −12 lines

packages/camera/CHANGELOG.md

Lines changed: 4 additions & 0 deletions
@@ -1,3 +1,7 @@
+## 0.5.4
+
+* Add feature to pause and resume video recording.
+
 ## 0.5.3+1
 
 * Fix too large request code for FragmentActivity users.
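
For orientation before the diffs: once 0.5.4 ships, an app can pause and resume an in-progress recording from its CameraController. The Dart sketch below is not part of this commit; it simply mirrors the example-app changes further down and assumes an initialized controller that is already recording.

import 'package:camera/camera.dart';

// Hypothetical helper, mirroring the example app in this commit.
Future<void> togglePause(CameraController controller) async {
  if (!controller.value.isRecordingVideo) return;
  try {
    if (controller.value.isRecordingPaused) {
      await controller.resumeVideoRecording();
    } else {
      await controller.pauseVideoRecording();
    }
  } on CameraException catch (e) {
    // On Android, failures surface with the "videoRecordingFailed" error code.
    print('Could not pause/resume: ${e.code} ${e.description}');
  }
}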

packages/camera/android/src/main/java/io/flutter/plugins/camera/Camera.java

Lines changed: 32 additions & 0 deletions
@@ -388,6 +388,38 @@ public void stopVideoRecording(@NonNull final Result result) {
     }
   }
 
+  public void pauseVideoRecording(@NonNull final Result result) {
+    if (!recordingVideo) {
+      result.success(null);
+      return;
+    }
+
+    try {
+      mediaRecorder.pause();
+    } catch (IllegalStateException e) {
+      result.error("videoRecordingFailed", e.getMessage(), null);
+      return;
+    }
+
+    result.success(null);
+  }
+
+  public void resumeVideoRecording(@NonNull final Result result) {
+    if (!recordingVideo) {
+      result.success(null);
+      return;
+    }
+
+    try {
+      mediaRecorder.resume();
+    } catch (IllegalStateException e) {
+      result.error("videoRecordingFailed", e.getMessage(), null);
+      return;
+    }
+
+    result.success(null);
+  }
+
   public void startPreview() throws CameraAccessException {
     createCaptureSession(CameraDevice.TEMPLATE_PREVIEW, pictureImageReader.getSurface());
   }

packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java

Lines changed: 10 additions & 0 deletions
@@ -112,6 +112,16 @@ public void onMethodCall(@NonNull MethodCall call, @NonNull final Result result)
          camera.stopVideoRecording(result);
          break;
        }
+      case "pauseVideoRecording":
+        {
+          camera.pauseVideoRecording(result);
+          break;
+        }
+      case "resumeVideoRecording":
+        {
+          camera.resumeVideoRecording(result);
+          break;
+        }
       case "startImageStream":
         {
           try {
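
The Dart-side plumbing for these two method names lives in camera.dart, one of the files in this commit that is not shown in this excerpt. As a hedged sketch only: the channel name 'plugins.flutter.io/camera' and the argument-free calls below are assumptions, chosen because neither platform handler reads any arguments for pauseVideoRecording or resumeVideoRecording.

import 'package:flutter/services.dart';

// Hypothetical forwarding layer; the real signatures are defined in camera.dart.
final MethodChannel _channel = MethodChannel('plugins.flutter.io/camera');

Future<void> pauseVideoRecording() => _channel.invokeMethod('pauseVideoRecording');

Future<void> resumeVideoRecording() => _channel.invokeMethod('resumeVideoRecording');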

packages/camera/example/lib/main.dart

Lines changed: 53 additions & 0 deletions
@@ -214,6 +214,19 @@ class _CameraExampleHomeState extends State<CameraExampleHome>
               ? onVideoRecordButtonPressed
               : null,
         ),
+        IconButton(
+          icon: controller != null && controller.value.isRecordingPaused
+              ? Icon(Icons.play_arrow)
+              : Icon(Icons.pause),
+          color: Colors.blue,
+          onPressed: controller != null &&
+                  controller.value.isInitialized &&
+                  controller.value.isRecordingVideo
+              ? (controller != null && controller.value.isRecordingPaused
+                  ? onResumeButtonPressed
+                  : onPauseButtonPressed)
+              : null,
+        ),
         IconButton(
           icon: const Icon(Icons.stop),
           color: Colors.red,
@@ -316,6 +329,20 @@
     });
   }
 
+  void onPauseButtonPressed() {
+    pauseVideoRecording().then((_) {
+      if (mounted) setState(() {});
+      showInSnackBar('Video recording paused');
+    });
+  }
+
+  void onResumeButtonPressed() {
+    resumeVideoRecording().then((_) {
+      if (mounted) setState(() {});
+      showInSnackBar('Video recording resumed');
+    });
+  }
+
   Future<String> startVideoRecording() async {
     if (!controller.value.isInitialized) {
       showInSnackBar('Error: select a camera first.');
@@ -357,6 +384,32 @@
     await _startVideoPlayer();
   }
 
+  Future<void> pauseVideoRecording() async {
+    if (!controller.value.isRecordingVideo) {
+      return null;
+    }
+
+    try {
+      await controller.pauseVideoRecording();
+    } on CameraException catch (e) {
+      _showCameraException(e);
+      return null;
+    }
+  }
+
+  Future<void> resumeVideoRecording() async {
+    if (!controller.value.isRecordingVideo) {
+      return null;
+    }
+
+    try {
+      await controller.resumeVideoRecording();
+    } on CameraException catch (e) {
+      _showCameraException(e);
+      return null;
+    }
+  }
+
   Future<void> _startVideoPlayer() async {
     final VideoPlayerController vcontroller =
         VideoPlayerController.file(File(videoPath));

packages/camera/example/test_driver/camera.dart

Lines changed: 56 additions & 0 deletions
@@ -143,4 +143,60 @@ void main() {
       }
     }
   });
+
+  test('Pause and resume video recording', () async {
+    final List<CameraDescription> cameras = await availableCameras();
+    if (cameras.isEmpty) {
+      return;
+    }
+
+    final CameraController controller = CameraController(
+      cameras[0],
+      ResolutionPreset.low,
+      enableAudio: false,
+    );
+
+    await controller.initialize();
+    await controller.prepareForVideoRecording();
+
+    final String filePath =
+        '${testDir.path}/${DateTime.now().millisecondsSinceEpoch}.mp4';
+
+    int startPause;
+    int timePaused = 0;
+
+    await controller.startVideoRecording(filePath);
+    final int recordingStart = DateTime.now().millisecondsSinceEpoch;
+    sleep(const Duration(milliseconds: 500));
+
+    await controller.pauseVideoRecording();
+    startPause = DateTime.now().millisecondsSinceEpoch;
+    sleep(const Duration(milliseconds: 500));
+    await controller.resumeVideoRecording();
+    timePaused += DateTime.now().millisecondsSinceEpoch - startPause;
+
+    sleep(const Duration(milliseconds: 500));
+
+    await controller.pauseVideoRecording();
+    startPause = DateTime.now().millisecondsSinceEpoch;
+    sleep(const Duration(milliseconds: 500));
+    await controller.resumeVideoRecording();
+    timePaused += DateTime.now().millisecondsSinceEpoch - startPause;
+
+    sleep(const Duration(milliseconds: 500));
+
+    await controller.stopVideoRecording();
+    final int recordingTime =
+        DateTime.now().millisecondsSinceEpoch - recordingStart;
+
+    final File videoFile = File(filePath);
+    final VideoPlayerController videoController = VideoPlayerController.file(
+      videoFile,
+    );
+    await videoController.initialize();
+    final int duration = videoController.value.duration.inMilliseconds;
+    await videoController.dispose();
+
+    expect(duration, lessThan(recordingTime - timePaused));
+  });
 }
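
For reference on the final assertion: with the sleeps above, the clip records for roughly 3 × 500 ms = 1.5 s and sits paused for roughly 2 × 500 ms = 1.0 s, so recordingTime ends up around 2.5 s of wall-clock time and timePaused around 1.0 s. The test therefore expects the written video's duration to stay below recordingTime − timePaused (about 1.5 s), i.e. the paused intervals must not show up in the file.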

packages/camera/ios/Classes/CameraPlugin.m

Lines changed: 105 additions & 5 deletions
@@ -180,10 +180,18 @@ @interface FLTCam : NSObject <FlutterTexture,
 @property(strong, nonatomic) AVCaptureVideoDataOutput *videoOutput;
 @property(strong, nonatomic) AVCaptureAudioDataOutput *audioOutput;
 @property(assign, nonatomic) BOOL isRecording;
+@property(assign, nonatomic) BOOL isRecordingPaused;
+@property(assign, nonatomic) BOOL videoIsDisconnected;
+@property(assign, nonatomic) BOOL audioIsDisconnected;
 @property(assign, nonatomic) BOOL isAudioSetup;
 @property(assign, nonatomic) BOOL isStreamingImages;
 @property(assign, nonatomic) ResolutionPreset resolutionPreset;
+@property(assign, nonatomic) CMTime lastVideoSampleTime;
+@property(assign, nonatomic) CMTime lastAudioSampleTime;
+@property(assign, nonatomic) CMTime videoTimeOffset;
+@property(assign, nonatomic) CMTime audioTimeOffset;
 @property(nonatomic) CMMotionManager *motionManager;
+@property AVAssetWriterInputPixelBufferAdaptor *videoAdaptor;
 - (instancetype)initWithCameraName:(NSString *)cameraName
                   resolutionPreset:(NSString *)resolutionPreset
                        enableAudio:(BOOL)enableAudio
@@ -417,25 +425,89 @@ - (void)captureOutput:(AVCaptureOutput *)output
       CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
     }
   }
-  if (_isRecording) {
+  if (_isRecording && !_isRecordingPaused) {
     if (_videoWriter.status == AVAssetWriterStatusFailed) {
       _eventSink(@{
         @"event" : @"error",
         @"errorDescription" : [NSString stringWithFormat:@"%@", _videoWriter.error]
       });
       return;
     }
-    CMTime lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+
+    CFRetain(sampleBuffer);
+    CMTime currentSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+
     if (_videoWriter.status != AVAssetWriterStatusWriting) {
       [_videoWriter startWriting];
-      [_videoWriter startSessionAtSourceTime:lastSampleTime];
+      [_videoWriter startSessionAtSourceTime:currentSampleTime];
     }
+
     if (output == _captureVideoOutput) {
-      [self newVideoSample:sampleBuffer];
-    } else if (output == _audioOutput) {
+      if (_videoIsDisconnected) {
+        _videoIsDisconnected = NO;
+
+        if (_videoTimeOffset.value == 0) {
+          _videoTimeOffset = CMTimeSubtract(currentSampleTime, _lastVideoSampleTime);
+        } else {
+          CMTime offset = CMTimeSubtract(currentSampleTime, _lastVideoSampleTime);
+          _videoTimeOffset = CMTimeAdd(_videoTimeOffset, offset);
+        }
+
+        return;
+      }
+
+      _lastVideoSampleTime = currentSampleTime;
+
+      CVPixelBufferRef nextBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+      CMTime nextSampleTime = CMTimeSubtract(_lastVideoSampleTime, _videoTimeOffset);
+      [_videoAdaptor appendPixelBuffer:nextBuffer withPresentationTime:nextSampleTime];
+    } else {
+      CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
+
+      if (dur.value > 0) {
+        currentSampleTime = CMTimeAdd(currentSampleTime, dur);
+      }
+
+      if (_audioIsDisconnected) {
+        _audioIsDisconnected = NO;
+
+        if (_audioTimeOffset.value == 0) {
+          _audioTimeOffset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime);
+        } else {
+          CMTime offset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime);
+          _audioTimeOffset = CMTimeAdd(_audioTimeOffset, offset);
+        }
+
+        return;
+      }
+
+      _lastAudioSampleTime = currentSampleTime;
+
+      if (_audioTimeOffset.value != 0) {
+        CFRelease(sampleBuffer);
+        sampleBuffer = [self adjustTime:sampleBuffer by:_audioTimeOffset];
+      }
+
       [self newAudioSample:sampleBuffer];
     }
+
+    CFRelease(sampleBuffer);
+  }
+}
+
+- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset {
+  CMItemCount count;
+  CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
+  CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
+  CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
+  for (CMItemCount i = 0; i < count; i++) {
+    pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
+    pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
   }
+  CMSampleBufferRef sout;
+  CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
+  free(pInfo);
+  return sout;
 }
 
 - (void)newVideoSample:(CMSampleBufferRef)sampleBuffer {
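
In short, the pause strategy on iOS: while _isRecordingPaused is set, captureOutput skips writing entirely, and pauseVideoRecording also marks both streams as disconnected. The first video or audio sample that arrives after resuming is not written either; it is only used to grow _videoTimeOffset or _audioTimeOffset by the length of the gap. Every later sample is then shifted back by that offset (through the pixel-buffer adaptor for video and adjustTime: for audio) before being appended, so the written timeline stays continuous. For example, if the last video sample before a pause has a presentation timestamp of 10.0 s and the first one after resuming arrives at 13.0 s, the offset grows by 3.0 s and subsequent frames are appended at their timestamp minus 3.0 s.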
@@ -526,6 +598,11 @@ - (void)startVideoRecordingAtPath:(NSString *)path result:(FlutterResult)result
       return;
     }
     _isRecording = YES;
+    _isRecordingPaused = NO;
+    _videoTimeOffset = CMTimeMake(0, 1);
+    _audioTimeOffset = CMTimeMake(0, 1);
+    _videoIsDisconnected = NO;
+    _audioIsDisconnected = NO;
     result(nil);
   } else {
     _eventSink(@{@"event" : @"error", @"errorDescription" : @"Video is already recording!"});
@@ -556,6 +633,16 @@ - (void)stopVideoRecordingWithResult:(FlutterResult)result {
   }
 }
 
+- (void)pauseVideoRecording {
+  _isRecordingPaused = YES;
+  _videoIsDisconnected = YES;
+  _audioIsDisconnected = YES;
+}
+
+- (void)resumeVideoRecording {
+  _isRecordingPaused = NO;
+}
+
 - (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger {
   if (!_isStreamingImages) {
     FlutterEventChannel *eventChannel =
@@ -608,6 +695,13 @@ - (BOOL)setupWriterForPath:(NSString *)path {
                                                        nil];
   _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                          outputSettings:videoSettings];
+
+  _videoAdaptor = [AVAssetWriterInputPixelBufferAdaptor
+      assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput
+                                  sourcePixelBufferAttributes:@{
+                                    (NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)
+                                  }];
+
   NSParameterAssert(_videoWriterInput);
   _videoWriterInput.expectsMediaDataInRealTime = YES;
 
@@ -777,6 +871,12 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)re
   } else if ([@"stopImageStream" isEqualToString:call.method]) {
     [_camera stopImageStream];
     result(nil);
+  } else if ([@"pauseVideoRecording" isEqualToString:call.method]) {
+    [_camera pauseVideoRecording];
+    result(nil);
+  } else if ([@"resumeVideoRecording" isEqualToString:call.method]) {
+    [_camera resumeVideoRecording];
+    result(nil);
   } else {
     NSDictionary *argsMap = call.arguments;
     NSUInteger textureId = ((NSNumber *)argsMap[@"textureId"]).unsignedIntegerValue;