mlkit
A Flutter plugin to use the Firebase ML Kit.
This is not an official package.
The Flutter team now provides the firebase_ml_vision and firebase_ml_custom packages for Firebase ML Kit. Please consider using firebase_ml_vision instead.
Note: This plugin is still under development, and some APIs might not be available yet. Feedback and pull requests are most welcome!
Features
Feature | Android | iOS |
---|---|---|
Recognize text (on device) | ✅ | ✅ |
Recognize text (cloud) | not yet | not yet |
Detect faces (on device) | ✅ | ✅ |
Scan barcodes (on device) | ✅ | ✅ |
Label images (on device) | ✅ | ✅ |
Label images (cloud) | not yet | not yet |
Object detection & tracking | not yet | not yet |
Recognize landmarks (cloud) | not yet | not yet |
Language identification | ✅ | ✅ |
Translation | not yet | not yet |
Smart reply | not yet | not yet |
AutoML model inference | not yet | not yet |
Custom model (on device) | ✅ | ✅ |
Custom model (cloud) | ✅ | ✅ |
See the official overview "What features are available on device or in the cloud?" for details.
Usage
To use this plugin, add mlkit
as a dependency in your pubspec.yaml file.
Getting Started
Check out the example
directory for a sample app using Firebase ML Kit.
Android Integration
To integrate your plugin into the Android part of your app, follow these steps:
- Using the Firebase Console, add an Android app to your project: follow the assistant, download the generated
google-services.json
file, and place it inside android/app.
Next, modify the android/build.gradle
file and the android/app/build.gradle
file to add the Google services plugin as described by the Firebase assistant.
iOS Integration
To integrate your plugin into the iOS part of your app, follow these steps:
- Using the Firebase Console, add an iOS app to your project: follow the assistant, download the generated
GoogleService-Info.plist
file, open ios/Runner.xcworkspace
with Xcode, and within Xcode place the file inside ios/Runner.
Don't follow the steps named "Add Firebase SDK" and "Add initialization code" in the Firebase assistant.
Dart/Flutter Integration
From your Dart code, you need to import the plugin and instantiate it:
import 'package:mlkit/mlkit.dart';

FirebaseVisionTextDetector detector = FirebaseVisionTextDetector.instance;

// Detect from a file/image by path.
var currentLabels = await detector.detectFromPath(_file?.path);

// Detect from the binary data of a file/image.
var currentLabels = await detector.detectFromBinary(_file?.readAsBytesSync());
Custom Model Interpreter
import 'package:mlkit/mlkit.dart';
import 'package:image/image.dart' as img;

FirebaseModelInterpreter interpreter = FirebaseModelInterpreter.instance;
FirebaseModelManager manager = FirebaseModelManager.instance;

// Register the cloud-hosted model.
manager.registerRemoteModelSource(
    FirebaseRemoteModelSource(modelName: "mobilenet_v1_224_quant"));

// Register a local (bundled asset) model as a backup.
// NOTE: the original snippet was missing the closing parenthesis here.
manager.registerLocalModelSource(FirebaseLocalModelSource(
    modelName: 'mobilenet_v1_224_quant',
    assetFilePath: 'ml/mobilenet_v1_224_quant.tflite'));

var imageBytes = (await rootBundle.load("assets/mountain.jpg")).buffer;
img.Image image = img.decodeJpg(imageBytes.asUint8List());
image = img.copyResize(image, 224, 224);

// The app will download the remote model. While the remote model is being
// downloaded, it will use the local model instead.
var results = await interpreter.run(
    remoteModelName: "mobilenet_v1_224_quant",
    localModelName: "mobilenet_v1_224_quant",
    inputOutputOptions: FirebaseModelInputOutputOptions([
      FirebaseModelIOOption(FirebaseModelDataType.FLOAT32, [1, 224, 224, 3])
    ], [
      FirebaseModelIOOption(FirebaseModelDataType.FLOAT32, [1, 1001])
    ]),
    inputBytes: imageToByteList(image));
// Int (quantized) model: converts an image to a byte list where each
// RGB channel of each pixel is a single uint8 value.
Uint8List imageToByteList(img.Image image) {
  const inputSize = 224;
  // 1 image * inputSize * inputSize pixels * 3 channels (R, G, B).
  var convertedBytes = Uint8List(1 * inputSize * inputSize * 3);
  var buffer = ByteData.view(convertedBytes.buffer);
  int pixelIndex = 0;
  for (var i = 0; i < inputSize; i++) {
    for (var j = 0; j < inputSize; j++) {
      // getPixel returns a 32-bit ARGB value; unpack the color channels.
      var pixel = image.getPixel(i, j);
      buffer.setUint8(pixelIndex++, (pixel >> 16) & 0xFF); // red
      buffer.setUint8(pixelIndex++, (pixel >> 8) & 0xFF); // green
      buffer.setUint8(pixelIndex++, pixel & 0xFF); // blue
    }
  }
  return convertedBytes;
}
// Float model: converts an image to a byte list where each RGB channel of
// each pixel is a float32 normalized to the range [0, 1].
Uint8List imageToByteList(img.Image image) {
  const inputSize = 224;
  // 1 image * inputSize * inputSize pixels * 3 channels (R, G, B).
  var convertedBytes = Float32List(1 * inputSize * inputSize * 3);
  var buffer = Float32List.view(convertedBytes.buffer);
  int pixelIndex = 0;
  for (var i = 0; i < inputSize; i++) {
    for (var j = 0; j < inputSize; j++) {
      // getPixel returns a 32-bit ARGB value; unpack and normalize channels.
      var pixel = image.getPixel(i, j);
      buffer[pixelIndex++] = ((pixel >> 16) & 0xFF) / 255; // red
      buffer[pixelIndex++] = ((pixel >> 8) & 0xFF) / 255; // green
      buffer[pixelIndex++] = (pixel & 0xFF) / 255; // blue
    }
  }
  return convertedBytes.buffer.asUint8List();
}
Comments are closed.