This repository was archived by the owner on Feb 22, 2023. It is now read-only.
Closed
Changes from 1 commit
38 commits
67fa8fd
Add a placeholder for the live preview screen.
dustin-graham Jul 7, 2018
c864974
WIP. basic barcode detection impl.
dustin-graham Jul 7, 2018
73f4fe2
WIP.
dustin-graham Jul 7, 2018
04395d8
rotate image on Android prior to creating the FirebaseVisionImage
dustin-graham Jul 9, 2018
cb953e7
Add a basic camera preview implementation
dustin-graham Jul 9, 2018
973b32c
resume camera preview
dustin-graham Jul 9, 2018
8689993
strip out picture taking and video recording from Android camera impl
dustin-graham Jul 9, 2018
beaaa5f
android: insert per-frame handling of preview images
dustin-graham Jul 9, 2018
1ad5ae7
android: pipe image frames to MLKit Barcode detector
dustin-graham Jul 9, 2018
76930fc
android: send count of recognized barcodes
dustin-graham Jul 10, 2018
a0a1b62
android: get barcode bounding boxes displaying in Flutter
dustin-graham Jul 10, 2018
56272e4
ios: Add basic barcode scanning.
dustin-graham Jul 10, 2018
4e10f44
ios: Add live view barcode scanning
dustin-graham Jul 11, 2018
1313564
WIP: live text detection
dustin-graham Jul 13, 2018
99196c3
WIP: android live text detection.
dustin-graham Jul 14, 2018
dacaccf
WIP: Android legacy camera detector impl. Min SDK on this lib is 16.
dustin-graham Jul 19, 2018
6054f37
Android: allow detector and resolution to be set
dustin-graham Jul 19, 2018
de93bc7
Working live detection implementation for Android and iOS
dustin-graham Jul 20, 2018
7c8cca4
Merge remote-tracking branch 'upstream/master' into live_preview
dustin-graham Jul 20, 2018
4c5bf21
update Android with latest from upstream.
dustin-graham Jul 20, 2018
2219e8e
update both Android and iOS live view to work with new detectors.
dustin-graham Jul 20, 2018
8477cf9
Merge remote-tracking branch 'upstream/master' into live_preview
dustin-graham Jul 20, 2018
d1d24e7
remove unused ExifInterface dependency
dustin-graham Jul 21, 2018
b85bdb8
resolve dart analysis warnings.
dustin-graham Jul 21, 2018
a86fe62
reformat code.
dustin-graham Jul 21, 2018
e688aef
fix barcode test
dustin-graham Jul 21, 2018
762b04b
revert accidental camera plugin changes
dustin-graham Jul 21, 2018
8190bff
clang-format iOS files
dustin-graham Jul 21, 2018
f98ce94
Clean up LiveView implementation and fix formatting issues.
dustin-graham Jul 26, 2018
9fdee7c
Merge remote-tracking branch 'upstream/master' into live_preview
dustin-graham Jul 26, 2018
d7e6b32
update to support new LabelDetector.
dustin-graham Jul 26, 2018
776294d
undo inadvertent formatting changes outside firebase_ml_vision.
dustin-graham Jul 26, 2018
ef72e0a
add camera plugin as a dependency to firebase_ml_vision
dustin-graham Jul 27, 2018
543b6d7
WIP: send detected data back to Flutter from live feed.
dustin-graham Jul 27, 2018
22e10c2
fix formatting issues.
dustin-graham Jul 27, 2018
858b2aa
restore normal camera functionality.
dustin-graham Jul 27, 2018
5bbbcbc
Merge remote-tracking branch 'upstream/master' into live_preview_came…
dustin-graham Aug 9, 2018
e91181f
present detection boundaries in Flutter from camera plugin integration
dustin-graham Aug 9, 2018
update Android with latest from upstream.
dustin-graham committed Jul 20, 2018
commit 4c5bf215a7c35e286d27a9c1fda2cf6483c0a1d5
@@ -3,6 +3,8 @@
import android.graphics.Point;
import android.graphics.Rect;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;

import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.firebase.ml.vision.FirebaseVision;
@@ -15,15 +17,19 @@
import java.util.List;
import java.util.Map;

class BarcodeDetector implements Detector {
public class BarcodeDetector extends Detector {
public static final BarcodeDetector instance = new BarcodeDetector();
private static FirebaseVisionBarcodeDetector barcodeDetector;

private BarcodeDetector() {}

@Override
public void handleDetection(
FirebaseVisionImage image, Map<String, Object> options, final MethodChannel.Result result) {
public void close(@Nullable OperationFinishedCallback callback) {

}

@Override
void processImage(FirebaseVisionImage image, Map<String, Object> options, final OperationFinishedCallback finishedCallback) {
if (barcodeDetector == null)
barcodeDetector = FirebaseVision.getInstance().getVisionBarcodeDetector();

@@ -178,14 +184,14 @@ public void onSuccess(List<FirebaseVisionBarcode> firebaseVisionBarcodes) {

barcodes.add(barcodeMap);
}
result.success(barcodes);
finishedCallback.success(BarcodeDetector.this, barcodes);
}
})
.addOnFailureListener(
new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception exception) {
result.error("barcodeDetectorError", exception.getLocalizedMessage(), null);
finishedCallback.error(new DetectorException("barcodeDetectorError", exception.getLocalizedMessage(), null));
}
});
}
@@ -4,6 +4,7 @@

import com.google.firebase.ml.vision.common.FirebaseVisionImage;

import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;

public abstract class Detector {
@@ -25,7 +26,7 @@ public void handleDetection(
if (shouldThrottle.get()) {
return;
}
processImage(image, new OperationFinishedCallback() {
processImage(image, options, new OperationFinishedCallback() {
@Override
public void success(Detector detector, Object data) {
shouldThrottle.set(false);
@@ -45,5 +46,5 @@ public void error(DetectorException e) {
}

abstract void processImage(
FirebaseVisionImage image, OperationFinishedCallback finishedCallback);
FirebaseVisionImage image, Map<String, Object> options, OperationFinishedCallback finishedCallback);
}
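For orientation, the reworked base class implied by the fragments above fits together roughly as follows. The OperationFinishedCallback methods, the shouldThrottle field, and the processImage/close signatures are taken from the visible hunks; the collapsed lines (for example, setting shouldThrottle before dispatching a frame) are assumptions, so treat this as a sketch rather than the committed code.

package io.flutter.plugins.firebasemlvision;

import android.support.annotation.Nullable;
import com.google.firebase.ml.vision.common.FirebaseVisionImage;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;

// Sketch of the new Detector contract; reconstructed from the hunks, not copied from the commit.
public abstract class Detector {
  public interface OperationFinishedCallback {
    void success(Detector detector, Object data);
    void error(DetectorException e);
  }

  // Drops incoming frames while a previous detection is still running.
  private final AtomicBoolean shouldThrottle = new AtomicBoolean(false);

  public void handleDetection(
      FirebaseVisionImage image, Map<String, Object> options, final OperationFinishedCallback callback) {
    if (shouldThrottle.get()) {
      return;
    }
    shouldThrottle.set(true); // assumed: not visible in the collapsed hunk
    processImage(image, options, new OperationFinishedCallback() {
      @Override
      public void success(Detector detector, Object data) {
        shouldThrottle.set(false);
        callback.success(detector, data);
      }

      @Override
      public void error(DetectorException e) {
        shouldThrottle.set(false);
        callback.error(e);
      }
    });
  }

  // Concrete detectors (BarcodeDetector, TextDetector, FaceDetector, LabelDetector)
  // implement these two methods instead of handleDetection.
  public abstract void close(@Nullable OperationFinishedCallback callback);

  abstract void processImage(
      FirebaseVisionImage image, Map<String, Object> options, OperationFinishedCallback finishedCallback);
}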
@@ -1,6 +1,8 @@
package io.flutter.plugins.firebasemlvision;

import android.support.annotation.NonNull;
import android.support.annotation.Nullable;

import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.firebase.ml.vision.FirebaseVision;
@@ -15,15 +17,18 @@
import java.util.List;
import java.util.Map;

class FaceDetector implements Detector {
public class FaceDetector extends Detector {
public static final FaceDetector instance = new FaceDetector();

private FaceDetector() {}

@Override
public void handleDetection(
FirebaseVisionImage image, Map<String, Object> options, final MethodChannel.Result result) {
public void close(@Nullable OperationFinishedCallback callback) {
// TODO: figure out if we still need to do this
}

@Override
void processImage(FirebaseVisionImage image, Map<String, Object> options, final OperationFinishedCallback finishedCallback) {
FirebaseVisionFaceDetector detector;
if (options == null) {
detector = FirebaseVision.getInstance().getVisionFaceDetector();
@@ -72,14 +77,14 @@ public void onSuccess(List<FirebaseVisionFace> firebaseVisionFaces) {
faces.add(faceData);
}

result.success(faces);
finishedCallback.success(FaceDetector.this, faces);
}
})
.addOnFailureListener(
new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception exception) {
result.error("faceDetectorError", exception.getLocalizedMessage(), null);
finishedCallback.error(new DetectorException("faceDetectorError", exception.getLocalizedMessage(), null));
}
});
}
@@ -134,7 +134,7 @@ public void onMethodCall(MethodCall call, final Result result) {
camera.stop();
}
camera = new LegacyCamera(registrar, resolutionPreset, Integer.parseInt(cameraName)); //new Camera(registrar, cameraName, resolutionPreset, result);
camera.setMachineLearningFrameProcessor(TextDetector.instance);
camera.setMachineLearningFrameProcessor(TextDetector.instance, options);
try {
camera.start(new LegacyCamera.OnCameraOpenedCallback() {
@Override
@@ -174,17 +174,20 @@ public void onFailed(Exception e) {
case "barcode":
detector = BarcodeDetector.instance;
break;
case "face":
detector = FaceDetector.instance;
break;
default:
detector = TextDetector.instance;
}
camera.setMachineLearningFrameProcessor(detector);
camera.setMachineLearningFrameProcessor(detector, options);
}
result.success(null);
break;
case "BarcodeDetector#detectInImage":
try {
image = filePathToVisionImage((String) call.argument("path"));
BarcodeDetector.instance.handleDetection(image, options, result);
BarcodeDetector.instance.handleDetection(image, options, handleDetection(result));
} catch (IOException e) {
result.error("barcodeDetectorIOError", e.getLocalizedMessage(), null);
} catch (Exception e) {
@@ -194,7 +197,7 @@ public void onFailed(Exception e) {
case "FaceDetector#detectInImage":
try {
image = filePathToVisionImage((String) call.argument("path"));
FaceDetector.instance.handleDetection(image, options, result);
FaceDetector.instance.handleDetection(image, options, handleDetection(result));
} catch (IOException e) {
result.error("faceDetectorIOError", e.getLocalizedMessage(), null);
} catch (Exception e) {
@@ -206,7 +209,7 @@ public void onFailed(Exception e) {
case "TextDetector#detectInImage":
try {
image = filePathToVisionImage((String) call.argument("path"));
TextDetector.instance.handleDetection(image, options, result);
TextDetector.instance.handleDetection(image, options, handleDetection(result));
} catch (IOException e) {
result.error("textDetectorIOError", e.getLocalizedMessage(), null);
} catch (Exception e) {
@@ -218,6 +221,20 @@ public void onFailed(Exception e) {
}
}

private Detector.OperationFinishedCallback handleDetection(final Result result) {
return new Detector.OperationFinishedCallback() {
@Override
public void success(Detector detector, Object data) {
result.success(data);
}

@Override
public void error(DetectorException e) {
e.sendError(result);
}
};
}

private FirebaseVisionImage filePathToVisionImage(String path) throws IOException {
File file = new File(path);
return FirebaseVisionImage.fromFilePath(registrar.context(), Uri.fromFile(file));
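DetectorException is constructed and rethrown throughout these hunks, but its definition is not part of this commit. Since its constructor arguments mirror MethodChannel.Result#error(code, message, details) and the plugin calls e.sendError(result) above, a minimal compatible version might look like the sketch below; every name here beyond sendError and the three-argument constructor is an assumption.

package io.flutter.plugins.firebasemlvision;

import io.flutter.plugin.common.MethodChannel;

// Hypothetical sketch of DetectorException; only sendError and the three-argument
// constructor are implied by the diff, the rest is assumed.
public class DetectorException extends Exception {
  private final String errorCode;
  private final String errorMessage;
  private final Object errorDetails;

  public DetectorException(String errorCode, String errorMessage, Object errorDetails) {
    super(errorCode + ": " + errorMessage);
    this.errorCode = errorCode;
    this.errorMessage = errorMessage;
    this.errorDetails = errorDetails;
  }

  // Forwards the failure to Flutter through the pending MethodChannel result.
  public void sendError(MethodChannel.Result result) {
    result.error(errorCode, errorMessage, errorDetails);
  }
}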
@@ -1,11 +1,20 @@
package io.flutter.plugins.firebasemlvision;

import android.support.annotation.Nullable;

import com.google.firebase.ml.vision.common.FirebaseVisionImage;
import io.flutter.plugin.common.MethodChannel;
import java.util.Map;

class LabelDetector implements Detector {
public class LabelDetector extends Detector {

@Override
public void close(@Nullable OperationFinishedCallback callback) {

}

@Override
public void handleDetection(
FirebaseVisionImage image, Map<String, Object> options, final MethodChannel.Result result) {}
}
void processImage(FirebaseVisionImage image, Map<String, Object> options, OperationFinishedCallback finishedCallback) {

}
}
@@ -3,6 +3,8 @@
import android.graphics.Point;
import android.graphics.Rect;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;

import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.firebase.ml.vision.FirebaseVision;
@@ -15,15 +17,19 @@
import java.util.List;
import java.util.Map;

public class TextDetector implements Detector {
public class TextDetector extends Detector {
public static final TextDetector instance = new TextDetector();
private static FirebaseVisionTextDetector textDetector;

private TextDetector() {}

@Override
public void handleDetection(
FirebaseVisionImage image, Map<String, Object> options, final MethodChannel.Result result) {
public void close(@Nullable OperationFinishedCallback callback) {

}

@Override
void processImage(FirebaseVisionImage image, Map<String, Object> options, final OperationFinishedCallback finishedCallback) {
if (textDetector == null) textDetector = FirebaseVision.getInstance().getVisionTextDetector();
textDetector
.detectInImage(image)
@@ -59,14 +65,14 @@ public void onSuccess(FirebaseVisionText firebaseVisionText) {
blockData.put("lines", lines);
blocks.add(blockData);
}
result.success(blocks);
finishedCallback.success(TextDetector.this, blocks);
}
})
.addOnFailureListener(
new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception exception) {
result.error("textDetectorError", exception.getLocalizedMessage(), null);
finishedCallback.error(new DetectorException("textDetectorError", exception.getLocalizedMessage(), null));
}
});
}
@@ -359,7 +359,7 @@ private void processImage(Image image) {
.build();
FirebaseVisionImage firebaseVisionImage = FirebaseVisionImage.fromByteBuffer(imageBuffer, metadata);

currentDetector.handleDetection(firebaseVisionImage, liveDetectorFinishedCallback);
currentDetector.handleDetection(firebaseVisionImage, new HashMap<String, Object>(), liveDetectorFinishedCallback);

// FirebaseVisionBarcodeDetector visionBarcodeDetector = FirebaseVision.getInstance().getVisionBarcodeDetector();
// visionBarcodeDetector.detectInImage(firebaseVisionImage).addOnSuccessListener(new OnSuccessListener<List<FirebaseVisionBarcode>>() {
@@ -47,6 +47,8 @@
import io.flutter.plugins.firebasemlvision.BarcodeDetector;
import io.flutter.plugins.firebasemlvision.Detector;
import io.flutter.plugins.firebasemlvision.DetectorException;
import io.flutter.plugins.firebasemlvision.FaceDetector;
import io.flutter.plugins.firebasemlvision.LabelDetector;
import io.flutter.plugins.firebasemlvision.TextDetector;
import io.flutter.view.FlutterView;

@@ -123,6 +125,7 @@ public interface OnCameraOpenedCallback {

private final Object processorLock = new Object();
private Detector detector;
private Map<String, Object> detectorOptions;

/**
* Map to convert between a byte array, received from the camera, and its associated byte buffer.
@@ -145,6 +148,10 @@ public void success(Detector detector, Object data) {
dataType = "barcode";
} else if (detector instanceof TextDetector) {
dataType = "text";
} else if (detector instanceof LabelDetector) {
dataType = "label";
} else if (detector instanceof FaceDetector) {
dataType = "face";
} else {
// unsupported live detector
return;
@@ -658,12 +665,13 @@ public void onPreviewFrame(byte[] data, Camera camera) {
}
}

public void setMachineLearningFrameProcessor(Detector processor) {
public void setMachineLearningFrameProcessor(Detector processor, @Nullable Map<String, Object> options) {
synchronized (processorLock) {
if (detector != null) {
detector.close(null);
}
detector = processor;
detectorOptions = options;
}
}

@@ -795,7 +803,7 @@ public void run() {
.setRotation(rotation)
.build();
FirebaseVisionImage image = FirebaseVisionImage.fromByteBuffer(data, metadata);
detector.handleDetection(image, liveDetectorFinishedCallback);
detector.handleDetection(image, detectorOptions, liveDetectorFinishedCallback);
}
} catch (Throwable t) {
Log.e(TAG, "Exception thrown from receiver.", t);
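Condensed, the per-frame path through the camera changes above is: the plugin stores the detector and its options via setMachineLearningFrameProcessor, each preview frame is wrapped in a FirebaseVisionImage, and the shared live callback tags the result with a dataType string before handing it to the Flutter side. The sketch below restates that flow as two members of the camera class (processorLock, detector, and detectorOptions are the fields declared in the hunks above); the frameMetadata parameter and the sendToFlutter call are placeholders for parts this commit does not show.

// Sketch of the live-preview frame path; sendToFlutter stands in for the
// event-channel plumbing, which is not part of this commit.
private void processPreviewFrame(byte[] data, FirebaseVisionImageMetadata frameMetadata) {
  synchronized (processorLock) {
    if (detector == null) {
      return;
    }
    FirebaseVisionImage image = FirebaseVisionImage.fromByteArray(data, frameMetadata);
    detector.handleDetection(image, detectorOptions, liveDetectorFinishedCallback);
  }
}

private final Detector.OperationFinishedCallback liveDetectorFinishedCallback =
    new Detector.OperationFinishedCallback() {
      @Override
      public void success(Detector detector, Object data) {
        String dataType;
        if (detector instanceof BarcodeDetector) {
          dataType = "barcode";
        } else if (detector instanceof TextDetector) {
          dataType = "text";
        } else if (detector instanceof LabelDetector) {
          dataType = "label";
        } else if (detector instanceof FaceDetector) {
          dataType = "face";
        } else {
          return; // unsupported live detector
        }
        sendToFlutter(dataType, data); // placeholder for the actual dispatch to Dart
      }

      @Override
      public void error(DetectorException e) {
        // error propagation to Dart is not shown in this commit
      }
    };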
21 changes: 20 additions & 1 deletion packages/firebase_ml_vision/example/lib/detector_painters.dart
@@ -5,9 +5,28 @@
import 'dart:ui';

import 'package:firebase_ml_vision/firebase_ml_vision.dart';
import 'package:firebase_ml_vision/live_view.dart';
import 'package:flutter/material.dart';

enum Detector { barcode, face, label, text }
CustomPaint customPaintForResults(
Size imageSize, LiveViewDetectionList results) {
CustomPainter painter;
if (results is LiveViewBarcodeDetectionList) {
painter = new BarcodeDetectorPainter(imageSize, results.data);
} else if (results is LiveViewTextDetectionList) {
painter = new TextDetectorPainter(imageSize, results.data);
} else if (results is LiveViewFaceDetectionList) {
painter = new FaceDetectorPainter(imageSize, results.data);
} else if (results is LiveViewLabelDetectionList) {
painter = new LabelDetectorPainter(imageSize, results.data);
} else {
painter = null;
}

return new CustomPaint(
painter: painter,
);
}

class BarcodeDetectorPainter extends CustomPainter {
BarcodeDetectorPainter(this.absoluteImageSize, this.barcodeLocations);
12 changes: 6 additions & 6 deletions packages/firebase_ml_vision/example/lib/live_preview.dart
@@ -22,6 +22,7 @@ class LivePreview extends StatefulWidget {
class LivePreviewState extends State<LivePreview> {
bool _isShowingPreview = false;
LiveViewCameraLoadStateReady _readyLoadState;
GlobalKey<LiveViewState> _liveViewKey = new GlobalKey();

Stream<LiveViewCameraLoadState> _prepareCameraPreview() async* {
if (_readyLoadState != null) {
@@ -60,7 +61,7 @@ class LivePreviewState extends State<LivePreview> {
}

Future<Null> setLiveViewDetector() async {
return FirebaseVision.instance.setLiveViewRecognizer(widget.detector);
return _readyLoadState?.controller?.setDetector(widget.detector);
}

@override
@@ -76,7 +77,7 @@
initialData: new LiveViewCameraLoadStateLoading(),
builder: (BuildContext context,
AsyncSnapshot<LiveViewCameraLoadState> snapshot) {
final loadState = snapshot.data;
final LiveViewCameraLoadState loadState = snapshot.data;
if (loadState != null) {
if (loadState is LiveViewCameraLoadStateLoading ||
loadState is LiveViewCameraLoadStateLoaded) {
@@ -91,12 +92,11 @@
aspectRatio: _readyLoadState.controller.value.aspectRatio,
child: new LiveView(
controller: _readyLoadState.controller,
overlayBuilder:
(BuildContext context, Size previewSize, dynamic data) {
overlayBuilder: (BuildContext context, Size previewSize,
LiveViewDetectionList data) {
return data == null
? new Container()
: customPaintForResults(
widget.detector, previewSize, data);
: customPaintForResults(previewSize, data);
},
),
);