Skip to content
This repository was archived by the owner on Feb 22, 2023. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
39 commits
Select commit Hold shift + click to select a range
c3f703e
Start of Android side of byte stream passing
bparrishMines Oct 24, 2018
f310580
dart side of byte streaming
bparrishMines Oct 25, 2018
87fdb60
Fix android streaming
bparrishMines Oct 25, 2018
673b003
Merge branch 'master' of github.com:flutter/plugins into camera_android
bparrishMines Nov 14, 2018
8d353ad
Add ios byte streaming
bparrishMines Nov 15, 2018
0291a29
Convert buffer to uiimage to pass over
bparrishMines Nov 16, 2018
31f746a
formatting
bparrishMines Nov 18, 2018
5fcfeb2
Stream yuv bytes instead
bparrishMines Nov 28, 2018
040d1ae
Make video format a constant
bparrishMines Nov 29, 2018
16d1d32
Pass back metadata for ios image
bparrishMines Nov 29, 2018
8e88978
Pass back metadata for android image
bparrishMines Nov 29, 2018
67f8304
Dart code now parses camera image buffer
bparrishMines Nov 30, 2018
646283a
YUV image to bgra
bparrishMines Nov 30, 2018
7e99691
Add documentation
bparrishMines Dec 6, 2018
4cbfab9
Only pass available data on Android
bparrishMines Dec 6, 2018
aa0e263
Merge branch 'master' of github.com:bparrishMines/plugins into camera…
bparrishMines Dec 6, 2018
9b2ae22
Merge branch 'master' of github.com:flutter/plugins into camera_andro…
bparrishMines Dec 6, 2018
297fe7a
Bump version
bparrishMines Dec 6, 2018
bdd9007
Formatting
bparrishMines Dec 6, 2018
ccc057b
create imageformat error
bparrishMines Dec 7, 2018
989edf6
Don't return from null
bparrishMines Dec 7, 2018
22ce601
Merge branch 'master' of github.com:bparrishMines/plugins into camera…
bparrishMines Dec 10, 2018
b1d7b89
Init buffers in constructor
bparrishMines Dec 10, 2018
aa3db0c
Add yuv ios format
bparrishMines Dec 11, 2018
0349ae2
Used presets with defined resolution. Sometimes resolution would come…
bparrishMines Dec 11, 2018
2633c49
Formatting
bparrishMines Dec 13, 2018
6377c64
Move CameraImage classes to separate file
bparrishMines Dec 13, 2018
6660a68
Move camera.dart to src folder
bparrishMines Dec 13, 2018
a4d278e
Create camera library
bparrishMines Dec 13, 2018
045dd53
Better name and comments
bparrishMines Dec 19, 2018
01520fe
Change from library camera file
bparrishMines Dec 19, 2018
6155620
bytestream -> imagestream
bparrishMines Dec 19, 2018
5898b4c
Comments and names
bparrishMines Dec 20, 2018
7617bb9
Formatting
bparrishMines Dec 20, 2018
f18db98
Added resolution and fps todo
bparrishMines Dec 20, 2018
0bf466a
Unmodify file
bparrishMines Dec 20, 2018
a53222b
Empty commit to rerun tests
bparrishMines Dec 20, 2018
fe965d1
Remove TODO from documentation
bparrishMines Dec 20, 2018
7c0228f
Merge branch 'master' of github.com:flutter/plugins into camera_andro…
bparrishMines Jan 3, 2019
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Init buffers in constructor
  • Loading branch information
bparrishMines committed Dec 10, 2018
commit b1d7b89a1b6399d54339030089ed095069383429
2 changes: 1 addition & 1 deletion packages/camera/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
## 0.2.7

* Add byte streaming capability for the camera
* Add byte streaming capability for the camera.

## 0.2.6

Expand Down
91 changes: 43 additions & 48 deletions packages/camera/ios/Classes/CameraPlugin.m
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ @interface FLTSavePhotoDelegate : NSObject <AVCapturePhotoCaptureDelegate>
@end

@interface FLTByteStreamHandler : NSObject <FlutterStreamHandler>
@property(readonly, nonatomic) FlutterEventSink eventSink;
@property FlutterEventSink eventSink;
@end

@implementation FLTByteStreamHandler {
Expand Down Expand Up @@ -104,6 +104,8 @@ @interface FLTCam : NSObject <FlutterTexture,
@property(assign, nonatomic) BOOL isRecording;
@property(assign, nonatomic) BOOL isAudioSetup;
@property(assign, nonatomic) BOOL isStreamingBytes;
@property(nonatomic) vImage_Buffer destinationBuffer;
@property(nonatomic) vImage_Buffer conversionBuffer;
- (instancetype)initWithCameraName:(NSString *)cameraName
resolutionPreset:(NSString *)resolutionPreset
error:(NSError **)error;
Expand All @@ -119,8 +121,6 @@ - (void)captureToFile:(NSString *)filename result:(FlutterResult)result;

@implementation FLTCam
FourCharCode const videoFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Format that I believe is recommended for iOS 10+, which is the minimum requirement for this plugin.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Add this explanation as a comment for future readers to understand the code.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

done

vImage_Buffer destinationBuffer;
vImage_Buffer conversionBuffer;

- (instancetype)initWithCameraName:(NSString *)cameraName
resolutionPreset:(NSString *)resolutionPreset
Expand Down Expand Up @@ -151,6 +151,9 @@ - (instancetype)initWithCameraName:(NSString *)cameraName
CMVideoFormatDescriptionGetDimensions([[_captureDevice activeFormat] formatDescription]);
_previewSize = CGSizeMake(dimensions.width, dimensions.height);

vImageBuffer_Init(&_destinationBuffer, 1280, 720, 32, kvImageNoFlags);
vImageBuffer_Init(&_conversionBuffer, 1280, 720, 32, kvImageNoFlags);

_captureVideoOutput = [AVCaptureVideoDataOutput new];
_captureVideoOutput.videoSettings =
@{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)};
Expand Down Expand Up @@ -213,43 +216,45 @@ - (void)captureOutput:(AVCaptureOutput *)output
});
return;
}
if (_isStreamingBytes && _byteStreamHandler.eventSink) {
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
if (_isStreamingBytes) {
if (_byteStreamHandler.eventSink) {
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

size_t imageWidth = CVPixelBufferGetWidth(pixelBuffer);
size_t imageHeight = CVPixelBufferGetHeight(pixelBuffer);
size_t imageWidth = CVPixelBufferGetWidth(pixelBuffer);
size_t imageHeight = CVPixelBufferGetHeight(pixelBuffer);

NSMutableArray *planes = [NSMutableArray array];
NSMutableArray *planes = [NSMutableArray array];

size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);
for (int i = 0; i < planeCount; i++) {
void *planeAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, i);
size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, i);
size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, i);
size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, i);
size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);
for (int i = 0; i < planeCount; i++) {
void *planeAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, i);
size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, i);
size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, i);
size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, i);

NSNumber *length = @(bytesPerRow * height);
NSData *bytes = [NSData dataWithBytes:planeAddress length:length.unsignedIntegerValue];
NSNumber *length = @(bytesPerRow * height);
NSData *bytes = [NSData dataWithBytes:planeAddress length:length.unsignedIntegerValue];

NSMutableDictionary *planeBuffer = [NSMutableDictionary dictionary];
planeBuffer[@"bytesPerRow"] = @(bytesPerRow);
planeBuffer[@"width"] = @(width);
planeBuffer[@"height"] = @(height);
planeBuffer[@"bytes"] = [FlutterStandardTypedData typedDataWithBytes:bytes];
NSMutableDictionary *planeBuffer = [NSMutableDictionary dictionary];
planeBuffer[@"bytesPerRow"] = @(bytesPerRow);
planeBuffer[@"width"] = @(width);
planeBuffer[@"height"] = @(height);
planeBuffer[@"bytes"] = [FlutterStandardTypedData typedDataWithBytes:bytes];

[planes addObject:planeBuffer];
}
[planes addObject:planeBuffer];
}

NSMutableDictionary *imageBuffer = [NSMutableDictionary dictionary];
imageBuffer[@"width"] = [NSNumber numberWithUnsignedLong:imageWidth];
imageBuffer[@"height"] = [NSNumber numberWithUnsignedLong:imageHeight];
imageBuffer[@"format"] = @(videoFormat);
imageBuffer[@"planes"] = planes;
NSMutableDictionary *imageBuffer = [NSMutableDictionary dictionary];
imageBuffer[@"width"] = [NSNumber numberWithUnsignedLong:imageWidth];
imageBuffer[@"height"] = [NSNumber numberWithUnsignedLong:imageHeight];
imageBuffer[@"format"] = @(videoFormat);
imageBuffer[@"planes"] = planes;

_byteStreamHandler.eventSink(imageBuffer);
_byteStreamHandler.eventSink(imageBuffer);

CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
}
if (_isRecording) {
if (_videoWriter.status == AVAssetWriterStatusFailed) {
Expand Down Expand Up @@ -336,10 +341,10 @@ - (CVPixelBufferRef)copyPixelBuffer {
pixelBuffer = _latestPixelBuffer;
}

return [self convertYUVImageTOBGRA:pixelBuffer];
return [self convertYUVImageToBGRA:pixelBuffer];
}

- (CVPixelBufferRef)convertYUVImageTOBGRA:(CVPixelBufferRef)pixelBuffer {
- (CVPixelBufferRef)convertYUVImageToBGRA:(CVPixelBufferRef)pixelBuffer {
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

vImage_YpCbCrToARGB infoYpCbCrToARGB;
Expand Down Expand Up @@ -369,30 +374,20 @@ - (CVPixelBufferRef)convertYUVImageTOBGRA:(CVPixelBufferRef)pixelBuffer {
sourceChromaBuffer.width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
sourceChromaBuffer.rowBytes = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);

if (!destinationBuffer.height) {
vImageBuffer_Init(&destinationBuffer, sourceLumaBuffer.height, sourceLumaBuffer.width, 32,
kvImageNoFlags);
}

vImageConvert_420Yp8_CbCr8ToARGB8888(&sourceLumaBuffer, &sourceChromaBuffer, &destinationBuffer,
vImageConvert_420Yp8_CbCr8ToARGB8888(&sourceLumaBuffer, &sourceChromaBuffer, &_destinationBuffer,
&infoYpCbCrToARGB, NULL, 255,
kvImagePrintDiagnosticsToConsole);

CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
CVPixelBufferRelease(pixelBuffer);

if (!conversionBuffer.height) {
vImageBuffer_Init(&conversionBuffer, sourceLumaBuffer.height, sourceLumaBuffer.width, 32,
kvImageNoFlags);
}

const uint8_t map[4] = {3, 2, 1, 0};
vImagePermuteChannels_ARGB8888(&destinationBuffer, &conversionBuffer, map, kvImageNoFlags);
vImagePermuteChannels_ARGB8888(&_destinationBuffer, &_conversionBuffer, map, kvImageNoFlags);

CVPixelBufferRef newPixelBuffer = NULL;
CVPixelBufferCreateWithBytes(NULL, conversionBuffer.width, conversionBuffer.height,
kCVPixelFormatType_32BGRA, conversionBuffer.data,
conversionBuffer.rowBytes, NULL, NULL, NULL, &newPixelBuffer);
CVPixelBufferCreateWithBytes(NULL, _conversionBuffer.width, _conversionBuffer.height,
kCVPixelFormatType_32BGRA, _conversionBuffer.data,
_conversionBuffer.rowBytes, NULL, NULL, NULL, &newPixelBuffer);

return newPixelBuffer;
}
Expand Down