This repository has been archived by the owner on Apr 4, 2023. It is now read-only.

Add support for MLKit's AutoML #1500
EddyVerbruggen committed Dec 1, 2019
1 parent d31c10a commit ae354c9
Showing 12 changed files with 85 additions and 11 deletions.
4 changes: 2 additions & 2 deletions .travis.yml
@@ -42,8 +42,8 @@ android:
- sys-img-armeabi-v7a-android-21

before_install:
# - sudo pip install --upgrade pip
# - sudo pip install six
- sudo pip install --upgrade pip
- sudo pip install six

install:
- echo no | npm install -g nativescript
@@ -0,0 +1,2 @@
Left
Right
@@ -0,0 +1,5 @@
{
"modelFile": "model.tflite",
"labelsFile": "dict.txt",
"modelType": "IMAGE_LABELING"
}
Binary file not shown.
1 change: 1 addition & 0 deletions demo-ng/app/tabs/mlkit/automl/automl.component.html
@@ -8,6 +8,7 @@
width="100%"
height="100%"
opacity="0.8"
localModelResourceFolder="leftright"
processEveryNthFrame="60"
confidenceThreshold="0.4"
(scanResult)="onAutoMLResult($event)">
1 change: 1 addition & 0 deletions demo-ng/app/tabs/mlkit/mlkit.component.ts
@@ -459,6 +459,7 @@ export class MLKitComponent {
private autoML(imageSource: ImageSource): void {
firebase.mlkit.automl.labelImage({
image: imageSource,
localModelResourceFolder: "leftright",
confidenceThreshold: 0.3
}).then(
(result: MLKitAutoMLResult) => {
41 changes: 41 additions & 0 deletions docs/ML_KIT.md
@@ -90,6 +90,7 @@ To be able to use Cloud features you need to do two things:
|[Natural language identification](#natural-language-identification)|✅|
|[Translate text](#translate-text)|✅|
|[Smart reply](#smart-reply)|✅|
|[AutoML Vision Edge](#automl-vision-edge)|✅|✅
|[Custom model inference](#custom-model-inference)|✅|✅

### Text recognition
@@ -513,6 +514,46 @@ firebase.mlkit.smartreply.suggestReplies({
.catch(errorMessage => console.log("ML Kit error: " + errorMessage));
```
### AutoML Vision Edge
<img src="https://raw.githubusercontent.com/EddyVerbruggen/nativescript-plugin-firebase/master/docs/images/features/mlkit_automl.png" height="153px" alt="ML Kit - AutoML Vision Edge"/>
[Firebase documentation 🌎](https://firebase.google.com/docs/ml-kit/automl-image-labeling)
> NOTE: currently only local models are supported (not cloud models), but it's fairly easy to add those, so please open an issue if you need them. See the demo-ng folder for an example.
#### Still image (on-device)
```typescript
import { MLKitAutoMLResult } from "nativescript-plugin-firebase/mlkit/automl";
const firebase = require("nativescript-plugin-firebase");
firebase.mlkit.automl.labelImage({
localModelResourceFolder: "leftright",
image: imageSource,
confidenceThreshold: 0.6 // this will only return labels with at least 0.6 (60%) confidence. Default 0.5.
})
.then((result: MLKitAutoMLResult) => console.log(JSON.stringify(result.labels)))
.catch(errorMessage => console.log("ML Kit error: " + errorMessage));
```
#### Live camera feed
The basics are explained above for 'Text recognition', so we're only showing the differences here.
```typescript
import { registerElement } from "nativescript-angular/element-registry";
registerElement("MLKitAutoML", () => require("nativescript-plugin-firebase/mlkit/automl").MLKitAutoML);
```
```html
<MLKitAutoML
width="260"
height="380"
localModelResourceFolder="leftright"
confidenceThreshold="0.6"
(scanResult)="onAutoMLResult($event)">
</MLKitAutoML>
```
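As with the other camera-feed features, the emitted event is handled in your component. A minimal sketch (not part of this commit), assuming the event's `value` property carries an `MLKitAutoMLResult` just like the still-image API returns:

```typescript
import { MLKitAutoMLResult } from "nativescript-plugin-firebase/mlkit/automl";

export class AutoMLExampleComponent {
  onAutoMLResult(scanResult: any): void {
    // The camera view is assumed to put the recognition result on the event's 'value' property.
    const value: MLKitAutoMLResult = scanResult.value;
    console.log(JSON.stringify(value.labels));
  }
}
```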
### Custom model inference
<img src="https://raw.githubusercontent.com/EddyVerbruggen/nativescript-plugin-firebase/master/docs/images/features/mlkit_custom_model_tflite.png" height="153px" alt="ML Kit - Custom Model (TensorFlow Lite)"/>
Binary file added docs/images/features/mlkit_automl.png
12 changes: 12 additions & 0 deletions src/mlkit/automl/automl-common.ts
@@ -1,6 +1,11 @@
import { Property } from "tns-core-modules/ui/core/properties";
import { MLKitCameraView } from "../mlkit-cameraview";

export const localModelResourceFolderProperty = new Property<MLKitAutoML, string>({
name: "localModelResourceFolder",
defaultValue: null,
});

export const confidenceThresholdProperty = new Property<MLKitAutoML, number>({
name: "confidenceThreshold",
defaultValue: 0.5,
@@ -9,11 +14,18 @@ export const confidenceThresholdProperty = new Property<MLKitAutoML, number>({
export abstract class MLKitAutoML extends MLKitCameraView {
static scanResultEvent: string = "scanResult";

protected localModelResourceFolder: string;
protected confidenceThreshold: number;


[localModelResourceFolderProperty.setNative](value: string) {
this.localModelResourceFolder = value;
}

[confidenceThresholdProperty.setNative](value: any) {
this.confidenceThreshold = parseFloat(value);
}
}

localModelResourceFolderProperty.register(MLKitAutoML);
confidenceThresholdProperty.register(MLKitAutoML);
17 changes: 12 additions & 5 deletions src/mlkit/automl/index.android.ts
@@ -8,7 +8,7 @@ declare const com: any;
export class MLKitAutoML extends MLKitAutoMLBase {

protected createDetector(): any {
return getDetector(this.confidenceThreshold);
return getDetector(this.localModelResourceFolder, this.confidenceThreshold);
}

protected createSuccessListener(): any {
@@ -44,19 +44,26 @@ export class MLKitAutoML extends MLKitAutoMLBase {
}
}

function getDetector(confidenceThreshold: number): com.google.firebase.ml.vision.label.FirebaseVisionImageLabeler {
function getDetector(localModelResourceFolder: string, confidenceThreshold: number): com.google.firebase.ml.vision.label.FirebaseVisionImageLabeler {
// TODO also support cloud hosted models
const model = new com.google.firebase.ml.vision.automl.FirebaseAutoMLLocalModel.Builder()
.setAssetFilePath(localModelResourceFolder + "/manifest.json") // TODO this..
// .setFilePath() // .. or this
.build();

const labelDetectorOptions =
new com.google.firebase.ml.vision.label.FirebaseVisionOnDeviceAutoMLImageLabelerOptions.Builder()
new com.google.firebase.ml.vision.label.FirebaseVisionOnDeviceAutoMLImageLabelerOptions.Builder(model)
.setConfidenceThreshold(confidenceThreshold)
.build();

return com.google.firebase.ml.vision.FirebaseVision.getInstance().getOnDeviceAutoMLImageLabeler(labelDetectorOptions);
return com.google.firebase.ml.vision.FirebaseVision.getInstance()
.getOnDeviceAutoMLImageLabeler(labelDetectorOptions);
}

export function labelImage(options: MLKitAutoMLOptions): Promise<MLKitAutoMLResult> {
return new Promise((resolve, reject) => {
try {
const firebaseVisionAutoMLImageLabeler = getDetector(options.confidenceThreshold || 0.5);
const firebaseVisionAutoMLImageLabeler = getDetector(options.localModelResourceFolder, options.confidenceThreshold || 0.5);

const onSuccessListener = new com.google.android.gms.tasks.OnSuccessListener({
onSuccess: labels => {
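The TODO in getDetector refers to the two ways FirebaseAutoMLLocalModel.Builder can locate the manifest: setAssetFilePath resolves a path relative to the app's bundled Android assets, while setFilePath takes an absolute path on the device, which is handy for models downloaded at runtime rather than shipped in the APK. A rough sketch of the setFilePath variant, assuming the manifest was previously saved under the app's documents folder (the folder name and helper are illustrative, not part of this commit):

```typescript
import { knownFolders, path } from "tns-core-modules/file-system";

declare const com: any;

// Sketch: build the labeler from a manifest stored on the filesystem instead of bundled assets.
function getDetectorFromFilePath(manifestFolderOnDisk: string, confidenceThreshold: number): any {
  // e.g. <documents>/leftright/manifest.json, written there by an earlier download step
  const manifestPath = path.join(knownFolders.documents().path, manifestFolderOnDisk, "manifest.json");

  const model = new com.google.firebase.ml.vision.automl.FirebaseAutoMLLocalModel.Builder()
      .setFilePath(manifestPath) // absolute path on the device, instead of setAssetFilePath
      .build();

  const options = new com.google.firebase.ml.vision.label.FirebaseVisionOnDeviceAutoMLImageLabelerOptions.Builder(model)
      .setConfidenceThreshold(confidenceThreshold)
      .build();

  return com.google.firebase.ml.vision.FirebaseVision.getInstance().getOnDeviceAutoMLImageLabeler(options);
}
```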
5 changes: 5 additions & 0 deletions src/mlkit/automl/index.d.ts
@@ -12,6 +12,11 @@ export interface MLKitAutoMLResult extends MLKitVisionResult {
}

export interface MLKitAutoMLOptions extends MLKitVisionOptions {
/**
* The folder name in your App_Resources/iOS or App_Resources/Android/src/main/assets folder.
*/
localModelResourceFolder: string;

/**
* Evaluate your model in the Firebase console to determine an appropriate value.
* 0.5 by default.
8 changes: 4 additions & 4 deletions src/mlkit/automl/index.ios.ts
@@ -6,7 +6,7 @@ import { MLKitAutoML as MLKitAutoMLBase } from "./automl-common";
export class MLKitAutoML extends MLKitAutoMLBase {

protected createDetector(): any {
return getDetector(this.confidenceThreshold);
return getDetector(this.localModelResourceFolder, this.confidenceThreshold);
}

protected createSuccessListener(): any {
@@ -41,8 +41,8 @@ }
}
}

function getDetector(confidenceThreshold?: number): FIRVisionImageLabeler {
const manifestPath = NSBundle.mainBundle.pathForResourceOfTypeInDirectory("manifest", "json", "leftright");
function getDetector(localModelResourceFolder: string, confidenceThreshold?: number): FIRVisionImageLabeler {
const manifestPath = NSBundle.mainBundle.pathForResourceOfTypeInDirectory("manifest", "json", localModelResourceFolder);
const fIRAutoMLLocalModel = FIRAutoMLLocalModel.alloc().initWithManifestPath(manifestPath);

const options = FIRVisionOnDeviceAutoMLImageLabelerOptions.alloc().initWithLocalModel(fIRAutoMLLocalModel);
@@ -56,7 +56,7 @@ function getDetector(confidenceThreshold?: number): FIRVisionImageLabeler {
export function labelImage(options: MLKitAutoMLOptions): Promise<MLKitAutoMLResult> {
return new Promise((resolve, reject) => {
try {
const labelDetector = getDetector(options.confidenceThreshold);
const labelDetector = getDetector(options.localModelResourceFolder, options.confidenceThreshold);

labelDetector.processImageCompletion(getImage(options), (labels: NSArray<FIRVisionImageLabel>, error: NSError) => {
if (error !== null) {
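On iOS the manifest is resolved through NSBundle, which returns null when the resource folder wasn't actually copied into the app bundle (for example when it is missing from App_Resources/iOS). A small defensive wrapper, sketched here as an idea rather than part of this commit, fails fast with a readable error instead of crashing inside FIRAutoMLLocalModel:

```typescript
// Sketch: wraps the getDetector above; assumes the same globals (NSBundle, FIRVisionImageLabeler) as index.ios.ts.
function getDetectorChecked(localModelResourceFolder: string, confidenceThreshold?: number): FIRVisionImageLabeler {
  const manifestPath = NSBundle.mainBundle.pathForResourceOfTypeInDirectory("manifest", "json", localModelResourceFolder);
  if (!manifestPath) {
    throw new Error("No manifest.json found in App_Resources/iOS/" + localModelResourceFolder);
  }
  return getDetector(localModelResourceFolder, confidenceThreshold);
}
```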
