This repository was archived by the owner on Sep 14, 2023. It is now read-only.
forked from flutter/plugins
-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathFirebaseMlVisionPlugin.m
More file actions
127 lines (106 loc) · 4.97 KB
/
FirebaseMlVisionPlugin.m
File metadata and controls
127 lines (106 loc) · 4.97 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
#import "FirebaseMlVisionPlugin.h"
#pragma mark - NSError (FlutterError)

@interface NSError (FlutterError)
/// Flutter-channel representation of this error.
@property(readonly, nonatomic) FlutterError *flutterError;
@end

@implementation NSError (FlutterError)

// Maps an NSError onto a FlutterError: the numeric code becomes the code
// string ("Error <code>"), the domain becomes the message, and the localized
// description is passed through as the details payload.
- (FlutterError *)flutterError {
  NSString *codeString = [NSString stringWithFormat:@"Error %d", (int)self.code];
  return [FlutterError errorWithCode:codeString
                             message:self.domain
                             details:self.localizedDescription];
}

@end
@implementation FLTFirebaseMlVisionPlugin
// Reports a native NSError back to the Dart side through the method-channel
// result callback, converted via the NSError (FlutterError) category.
+ (void)handleError:(NSError *)error result:(FlutterResult)result {
  result(error.flutterError);
}
// Flutter plugin entry point: creates the plugin's method channel and wires a
// fresh plugin instance up as its call delegate.
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
  FlutterMethodChannel *methodChannel =
      [FlutterMethodChannel methodChannelWithName:@"plugins.flutter.io/firebase_ml_vision"
                                  binaryMessenger:[registrar messenger]];
  [registrar addMethodCallDelegate:[[FLTFirebaseMlVisionPlugin alloc] init]
                           channel:methodChannel];
}
// Designated initializer. Lazily configures the default Firebase app in case
// the host application has not already done so.
- (instancetype)init {
  if ((self = [super init])) {
    if ([FIRApp defaultApp] == nil) {
      [FIRApp configure];
    }
  }
  return self;
}
// Dispatches a Dart method call to the matching ML Vision detector.
//
// BUGFIX: the original decoded the image payload *before* checking the method
// name, so an unrecognized method (which must answer
// FlutterMethodNotImplemented) instead raised NSInvalidArgumentException from
// -dataToVisionImage:. The not-implemented check now runs first, and the
// image is decoded only for recognized detector methods.
- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
  static NSSet<NSString *> *detectionMethods;
  static dispatch_once_t onceToken;
  dispatch_once(&onceToken, ^{
    detectionMethods = [NSSet setWithObjects:@"BarcodeDetector#detectInImage",
                                             @"FaceDetector#detectInImage",
                                             @"LabelDetector#detectInImage",
                                             @"CloudLabelDetector#detectInImage",
                                             @"TextRecognizer#processImage", nil];
  });
  if (![detectionMethods containsObject:call.method]) {
    result(FlutterMethodNotImplemented);
    return;
  }

  FIRVisionImage *image = [self dataToVisionImage:call.arguments];
  NSDictionary *options = call.arguments[@"options"];
  if ([@"BarcodeDetector#detectInImage" isEqualToString:call.method]) {
    [BarcodeDetector handleDetection:image options:options result:result];
  } else if ([@"FaceDetector#detectInImage" isEqualToString:call.method]) {
    [FaceDetector handleDetection:image options:options result:result];
  } else if ([@"LabelDetector#detectInImage" isEqualToString:call.method]) {
    [LabelDetector handleDetection:image options:options result:result];
  } else if ([@"CloudLabelDetector#detectInImage" isEqualToString:call.method]) {
    [CloudLabelDetector handleDetection:image options:options result:result];
  } else if ([@"TextRecognizer#processImage" isEqualToString:call.method]) {
    [TextRecognizer handleDetection:image options:options result:result];
  }
}
// Converts the method-call arguments into a FIRVisionImage.
//
// Two payload types are supported:
//   @"file"  - @"path" names an image file on disk.
//   @"bytes" - @"bytes" holds raw planar pixel data; @"metadata" describes
//              overall width/height, the pixel format FourCC, and per-plane
//              width/height/bytesPerRow.
// Any other type is a programmer error on the Dart side and throws
// NSInvalidArgumentException.
//
// BUGFIXES vs. the original:
//   * CVPixelBufferCreateWithPlanarBytes was passed a hard-coded plane count
//     of 2, breaking single-plane (e.g. BGRA) and 3-plane (e.g. I420) data;
//     it now receives the actual plane count.
//   * The CVPixelBufferRef (+1 from the Create call) was never released,
//     leaking a pixel buffer per frame; it is now balanced with
//     CVPixelBufferRelease.
//   * Empty plane data and pixel-buffer creation failure now return nil
//     instead of invoking undefined behavior / crashing downstream.
- (FIRVisionImage *)dataToVisionImage:(NSDictionary *)imageData {
  NSString *imageType = imageData[@"type"];

  if ([@"file" isEqualToString:imageType]) {
    UIImage *image = [UIImage imageWithContentsOfFile:imageData[@"path"]];
    return [[FIRVisionImage alloc] initWithImage:image];
  }

  if ([@"bytes" isEqualToString:imageType]) {
    FlutterStandardTypedData *byteData = imageData[@"bytes"];
    NSData *imageBytes = byteData.data;
    NSDictionary *metadata = imageData[@"metadata"];
    NSArray *planeData = metadata[@"planeData"];
    size_t planeCount = planeData.count;
    if (planeCount == 0) {
      return nil;  // Nothing to decode; avoid zero-length VLAs below.
    }

    size_t widths[planeCount];
    size_t heights[planeCount];
    size_t bytesPerRows[planeCount];
    void *baseAddresses[planeCount];
    baseAddresses[0] = (void *)imageBytes.bytes;

    // Planes are packed back-to-back in the byte buffer: each plane's base
    // address is the previous plane's base plus height * bytesPerRow.
    size_t lastAddressIndex = 0;
    for (size_t i = 0; i < planeCount; i++) {
      NSDictionary *plane = planeData[i];
      widths[i] = ((NSNumber *)plane[@"width"]).unsignedLongValue;
      heights[i] = ((NSNumber *)plane[@"height"]).unsignedLongValue;
      bytesPerRows[i] = ((NSNumber *)plane[@"bytesPerRow"]).unsignedLongValue;
      if (i > 0) {
        size_t addressIndex = lastAddressIndex + heights[i - 1] * bytesPerRows[i - 1];
        baseAddresses[i] = (char *)imageBytes.bytes + addressIndex;
        lastAddressIndex = addressIndex;
      }
    }

    NSNumber *width = metadata[@"width"];
    NSNumber *height = metadata[@"height"];
    NSNumber *rawFormat = metadata[@"rawFormat"];
    FourCharCode format = FOUR_CHAR_CODE(rawFormat.unsignedIntValue);

    CVPixelBufferRef pxbuffer = NULL;
    // No release callback: the buffer aliases imageBytes, which outlives the
    // pixel buffer's use within this method.
    CVReturn status = CVPixelBufferCreateWithPlanarBytes(
        kCFAllocatorDefault, width.unsignedLongValue, height.unsignedLongValue, format, NULL,
        imageBytes.length, planeCount, baseAddresses, widths, heights, bytesPerRows, NULL, NULL,
        NULL, &pxbuffer);
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
      return nil;
    }

    // Render the pixel buffer into a CGImage so FIRVisionImage can consume it.
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pxbuffer];
    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
    CGImageRef videoImage =
        [temporaryContext createCGImage:ciImage
                               fromRect:CGRectMake(0, 0, CVPixelBufferGetWidth(pxbuffer),
                                                   CVPixelBufferGetHeight(pxbuffer))];
    UIImage *uiImage = [UIImage imageWithCGImage:videoImage];
    CGImageRelease(videoImage);
    CVPixelBufferRelease(pxbuffer);  // Balance the +1 from Create.
    return [[FIRVisionImage alloc] initWithImage:uiImage];
  }

  NSString *errorReason = [NSString stringWithFormat:@"No image type for: %@", imageType];
  @throw [NSException exceptionWithName:NSInvalidArgumentException
                                 reason:errorReason
                               userInfo:nil];
}
@end