aboozaid / react-native-facerecognition Goto Github PK
View Code? Open in Web Editor NEW
A face detection and recognition library for React Native. Faster and Accurate!
A face detection and recognition library for React Native. Faster and Accurate!
can we use this to auto capture the person image and recognise with dataset?
It recognized though it's not trained. How can I achieve the same?
Here's the log I got when trying to install this package.
I hope you to follow their guide to make it better.
Thanks.
The following packages use deprecated "rnpm" config that will stop working from next release:
FAILURE: Build failed with an exception.
Could not resolve all task dependencies for configuration ':app:debugCompileClasspath'.
Could not resolve project :react-native-facerecognition.
Required by:
project :app
> Unable to find a matching configuration of project :react-native-facerecognition:
- None of the consumable configurations have attributes.
The author and maintenance of the library?
How to recognize the face without training
Hello guys,
I'm trying to create a simple face recognition but the thing is I can't proceed the development due to this error. I don't know where this error came from and where should I fix.
Here's the screenshot of the error that I've encountered
Android Device
Here's my code
import React, { Component } from 'react';
import {
Platform,
StyleSheet,
Text,
View,
TouchableOpacity,
ToastAndroid,
ListView,
TextInput,
TouchableHighlight,
DeviceEventEmitter
} from 'react-native';
import { RNCamera } from 'react-native-camera';
import Face from 'react-native-facerecognition'
import DialogManager, { SlideAnimation, DialogContent, DialogTitle, DialogButton } from 'react-native-dialog-component';
export default class App extends Component {
constructor(props) {
super(props);
this.ds = new ListView.DataSource({
rowHasChanged:(r1,r2) => r1 !== r2
});
const faces = [];
this.state = {
dataSource: this.ds.cloneWithRows(faces),
captured: 1,
faces: faces,
type: 'front'
};
}
componentDidMount() {
DeviceEventEmitter.addListener("onFaceRecognized", this.onFaceRecognized.bind(this))
DeviceEventEmitter.addListener("onClean", this.onClean.bind(this))
}
render() {
return (
<View style={styles.container}>
<RNCamera
ref={ref => {
this.camera = ref;
}}
style = {styles.preview}
type={this.state.type}
permissionDialogTitle={'Permission to use camera'}
permissionDialogMessage={'We need your permission to use your camera phone'}
onMountError={(msg) => this.cameraError(msg)}
onCameraReady={() => {this.onCameraReady()}}
/>
<View style={{flex: 0, flexDirection: 'row', justifyContent: 'center',}}>
<TouchableOpacity
onPress={this.takePicture.bind(this)}
style = {styles.capture}
>
<Text style={{fontSize: 14}}> SHOT </Text>
</TouchableOpacity>
<TouchableOpacity
onPress={this.recognizePicture.bind(this)}
style = {styles.capture}
>
<Text style={{fontSize: 14}}> RECOGNIZE </Text>
</TouchableOpacity>
<TouchableOpacity
onPress={() => this.setState({type: this.state.type === 'back' ? 'front' : 'back',})}
style = {styles.capture}
>
<Text style={{fontSize: 14}}> FLIP </Text>
</TouchableOpacity>
<TouchableOpacity
onPress={() => this.clean()}
style = {styles.capture}
>
<Text style={{fontSize: 14}}> CLEAN </Text>
</TouchableOpacity>
</View>
</View>
);
}
onFaceRecognized(data) {
ToastAndroid.show("Recognized: " + data.name + " Distance: " + data.distance, ToastAndroid.LONG)
}
onClean(msg) {
this.setState({faces: []})
ToastAndroid.show(msg, ToastAndroid.SHORT)
}
clean() {
Face.Clean();
}
onCameraReady() {
Face.Start(Face.Detection.DEEP, (success) => {
ToastAndroid.show("Train initialized", ToastAndroid.SHORT)
}, (error) => {
ToastAndroid.show(error, ToastAndroid.LONG)
})
}
takePicture = async function() {
if (this.camera) {
const options = { width: 200, base64: true }
const data = await this.camera.takePictureAsync(options)
Face.Detect(data.base64, (detected) => {
ToastAndroid.show(detected, ToastAndroid.SHORT)
this.setState({image64: data.base64})
this.onFaceDetect()
}, (error) => {
ToastAndroid.show(error, ToastAndroid.SHORT)
})
}
};
recognizePicture = async function() {
if (this.camera) {
const options = { width: 200, base64: true };
const data = await this.camera.takePictureAsync(options)
Face.Detect(data.base64, (detected) => {
ToastAndroid.show(detected, ToastAndroid.SHORT)
Face.Identify(data.base64, (unrecognized) => {
ToastAndroid.show(unrecognized, ToastAndroid.SHORT)
})
}, (error) => {
ToastAndroid.show(error, ToastAndroid.SHORT)
})
console.log(data.uri);
}
};
onFaceDetect() {
if(this.state.faces.length == 0)
this.newFaceDetected();
else {
DialogManager.show({
title: 'Trained Faces',
titleAlign: 'center',
haveOverlay: false,
animationDuration: 200,
SlideAnimation: new SlideAnimation({slideFrom: 'top'}),
children: (
<DialogContent >
<View>
<ListView dataSource = {this.state.dataSource} renderRow = {this.renderRow.bind(this)} />
</View>
<DialogButton text = "Close" align = 'right' onPress = {() => DialogManager.dismiss()} />
<DialogButton text = "New Face" align = 'right' onPress = {() => this.newFaceDetected()} />
</DialogContent>
),
}, () => {
console.log('callback - show')
});
}
}
newFaceDetected() {
DialogManager.show({
title: 'Train Face',
titleAlign: 'center',
haveOverlay: false,
animationDuration: 200,
SlideAnimation: new SlideAnimation({slideFrom: 'top'}),
children: (
<DialogContent>
<View>
<TextInput placeholder="face name" onChangeText={(Fname) => this.setState({Fname})} />
</View>
<DialogButton text = "Save" onPress= {() => this.newFaceImage()}/>
</DialogContent>
),
}, () => {
console.log('callback - show');
});
}
newFaceImage() {
const faces = [...this.state.faces, {Fname: this.state.Fname, captured: this.state.captured}]
const images = {image64: this.state.image64, Fname: this.state.Fname}
Face.Training(images, (result) => alert(result), (err) => alert(err))
this.setState({dataSource: this.ds.cloneWithRows(faces), faces})
DialogManager.dismissAll()
}
saveCaptureImage(faceData) {
if(faceData.captured == 5)
ToastAndroid.show("More photos are not allowed", ToastAndroid.SHORT)
else {
const slice = this.state.faces.slice()
slice.map((face) => {
if(face.Fname == faceData.Fname)
face.captured++
})
this.setState({dataSource: this.ds.cloneWithRows(slice)})
const images = {image64: this.state.image64, Fname: faceData.Fname}
Face.Training(images, (result) => alert(result), (err) => alert(err))
}
DialogManager.dismiss()
}
renderRow(rowData) {
return(
<TouchableHighlight onPress= {() => this.saveCaptureImage(rowData)} underlayColor='transparent' >
<View style = {{
flex:1,
flexDirection: 'row',
padding: 15,
alignItems: 'center',
borderColor: '#D7D7D7',
borderBottomWidth: 1
}}>
<Text style = {{fontSize: 16}}>{rowData.captured}</Text>
<View style = {{paddingLeft: 20}}>
<Text style = {{fontSize: 18}}>{rowData.Fname}</Text>
</View>
</View>
</TouchableHighlight>
);
}
}
// Screen styles: full-height camera preview with a row of action buttons.
const styles = StyleSheet.create({
container: {
flex: 1,
flexDirection: 'column',
backgroundColor: 'white'
},
// Camera preview fills the remaining space; children sit bottom-center.
preview: {
flex: 1,
justifyContent: 'flex-end',
alignItems: 'center'
},
// Rounded grey pill used for each action button (SHOT / RECOGNIZE / ...).
capture: {
flex: 0,
backgroundColor: '#D7D7D7',
borderRadius: 5,
padding: 10,
paddingHorizontal: 10,
alignSelf: 'center',
margin: 10
}
});
I already follow the documentation and how it setup
I really need your help guys. Hope you can help me.
Thank you and God bless
John Mark
I want to check when the face goes out of the camera frame. How can I do that?
I have a problem with the camera permission, even when I put it in the manifest
./android/build.gradle
// Top-level build file where you can add configuration options common to all sub-projects/modules.
// Root Gradle build: shared SDK/tooling pins and dependency repositories
// for every module in the project.
buildscript {
ext {
// Values read by module build files via rootProject.ext.
// NOTE(review): compileSdkVersion 26 with targetSdkVersion 29 is an
// unusual combination — presumably left over from mixing RN template
// versions; confirm against the RN version actually in use.
buildToolsVersion = "26.0.2"
minSdkVersion = 21
compileSdkVersion = 26
targetSdkVersion = 29
}
repositories {
google()
jcenter()
}
dependencies {
//classpath("com.android.tools.build:gradle:3.5.3")
classpath 'com.android.tools.build:gradle:3.0.1'
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
// Repositories available to every sub-project (app, linked native modules).
allprojects {
repositories {
mavenLocal()
maven {
// All of React Native (JS, Obj-C sources, Android binaries) is installed from npm
url("$rootDir/../node_modules/react-native/android")
}
maven {
// Android JSC is installed from npm
url("$rootDir/../node_modules/jsc-android/dist")
}
google()
jcenter()
maven { url 'https://www.jitpack.io' }
maven { url "https://maven.google.com" }
}
}
./android/app/build.gradle
// NOTE(review): this paste is garbled — the stock React Native template's
// block comments (the react.gradle doc header) were mangled during copy:
// the stray "/**" lines and "line.project.ext.react = [" below should read
// "project.ext.react = [". Compare against a clean RN app/build.gradle
// template before reusing this file.
apply plugin: "com.android.application"
import com.android.build.OutputFile
/**
react-native bundle
with the correct arguments during the Android buildapply from: "../../node_modules/react-native/react.gradle"
line.project.ext.react = [
enableHermes: false, // clean and rebuild if changing
]
apply from: "../../node_modules/react-native/react.gradle"
/**
/**
/**
def jscFlavor = 'org.webkit:android-jsc-intl:+'
Date.toLocaleString
and String.localeCompare
that/**
android {
// NOTE(review): compileSdkVersion is set twice — the hard-coded 26 is
// immediately overridden by rootProject.ext.compileSdkVersion.
compileSdkVersion 26
buildToolsVersion "26.0.2"
compileSdkVersion rootProject.ext.compileSdkVersion
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
defaultConfig {
applicationId "com.facereco"
minSdkVersion rootProject.ext.minSdkVersion
targetSdkVersion rootProject.ext.targetSdkVersion
versionCode 1
versionName "1.0"
}
splits {
abi {
reset()
enable enableSeparateBuildPerCPUArchitecture
universalApk false // If true, also generate a universal APK
include "armeabi-v7a", "x86", "arm64-v8a", "x86_64"
}
}
signingConfigs {
debug {
storeFile file('debug.keystore')
storePassword 'android'
keyAlias 'androiddebugkey'
keyPassword 'android'
}
}
buildTypes {
debug {
signingConfig signingConfigs.debug
}
release {
// Caution! In production, you need to generate your own keystore file.
// see https://reactnative.dev/docs/signed-apk-android.
signingConfig signingConfigs.debug
minifyEnabled enableProguardInReleaseBuilds
proguardFiles getDefaultProguardFile("proguard-android.txt"), "proguard-rules.pro"
}
}
// applicationVariants are e.g. debug, release
applicationVariants.all { variant ->
variant.outputs.each { output ->
// For each separate APK per architecture, set a unique version code as described here:
// https://developer.android.com/studio/build/configure-apk-splits.html
def versionCodes = ["armeabi-v7a": 1, "x86": 2, "arm64-v8a": 3, "x86_64": 4]
def abi = output.getFilter(OutputFile.ABI)
if (abi != null) { // null for the universal-debug, universal-release variants
output.versionCodeOverride =
versionCodes.get(abi) * 1048576 + defaultConfig.versionCode
}
}
}
}
dependencies {
// The face-recognition native module linked from node_modules (see settings.gradle).
implementation project(':react-native-facerecognition')
implementation fileTree(dir: "libs", include: ["*.jar"])
//noinspection GradleDynamicVersion
implementation "com.facebook.react:react-native:+" // From node_modules
implementation "androidx.swiperefreshlayout:swiperefreshlayout:1.0.0"
debugImplementation("com.facebook.flipper:flipper:${FLIPPER_VERSION}") {
exclude group:'com.facebook.fbjni'
}
debugImplementation("com.facebook.flipper:flipper-network-plugin:${FLIPPER_VERSION}") {
exclude group:'com.facebook.flipper'
exclude group:'com.squareup.okhttp3', module:'okhttp'
}
debugImplementation("com.facebook.flipper:flipper-fresco-plugin:${FLIPPER_VERSION}") {
exclude group:'com.facebook.flipper'
}
// JS engine: Hermes AARs from node_modules when enabled, otherwise JSC.
if (enableHermes) {
def hermesPath = "../../node_modules/hermes-engine/android/";
debugImplementation files(hermesPath + "hermes-debug.aar")
releaseImplementation files(hermesPath + "hermes-release.aar")
} else {
implementation jscFlavor
}
}
// Run this once to be able to run the application with BUCK
// puts all compile dependencies into folder libs for BUCK to use
task copyDownloadableDepsToLibs(type: Copy) {
from configurations.compile
into 'libs'
}
apply from: file("../../node_modules/@react-native-community/cli-platform-android/native_modules.gradle"); applyNativeModulesAppBuildGradle(project)
./android/gradle/wrapper/gradle-wrapper.properties
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https://services.gradle.org/distributions/gradle-4.4-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
./android/settings.gradle
rootProject.name = 'facereco'
// Manually link the face-recognition native module from node_modules
// (the library predates Gradle autolinking).
include ':react-native-facerecognition'
project(':react-native-facerecognition').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-facerecognition/android')
apply from: file("../node_modules/@react-native-community/cli-platform-android/native_modules.gradle"); applyNativeModulesSettingsGradle(settings)
include ':app'
// OpenCV subproject bundled inside the library; required by its build.gradle
// ("Project with path ':openCV' could not be found" when this is missing).
include ':openCV'
project(':openCV').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-facerecognition/android/openCV')
./android/app/src/main/AndroidManifest.xml
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" android:required="true"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="true"/>
<uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>
<application
android:name=".MainApplication"
android:label="@string/app_name"
android:icon="@mipmap/ic_launcher"
android:roundIcon="@mipmap/ic_launcher_round"
android:allowBackup="false"
android:theme="@style/AppTheme">
<activity
android:name=".MainActivity"
android:label="@string/app_name"
android:configChanges="keyboard|keyboardHidden|orientation|screenSize|uiMode"
android:launchMode="singleTask">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<activity android:name="com.facebook.react.devsupport.DevSettingsActivity" />
</application>
Thanks for your lib.
Don't know that I can use training with images instead of 'this.camera'
Could not determine the dependencies of task ':app:preDebugBuild'
Could not resolve all task dependencies for configuration ':app:debugRuntimeClasspath'
Could not resolve project :react-native-facerecognition ,
......
.....
.....
etc.
plz update the package or close this .
getting this error while running on android, please help me out!
C:\Users\Fahad-Security\Desktop\RNCamera\example\node_modules\react-native-facerecognition\android\app\src\main\java\opencv\android\FaceCameraManage.java:
uses or overrides a deprecated API.
Recompile with -Xlint:deprecation for details.
error: package com.reactnative.facerecognition does not exist
I have this error:
PackageList.java:15: error: cannot find symbol
import cv.reactnative.FaceModulePackage;
This is when i run the project, can you please help...
I already add the jniLibs... Still have this error....
I got this error when i linked
FAILURE: Build failed with an exception.
Where:
Build file 'D:\mobile-app\node_modules\react-native-facerecognition\android\app\build.gradle' line: 40
What went wrong:
A problem occurred evaluating project ':react-native-facerecognition'.
Project with path ':openCV' could not be found in project ':react-native-facerecognition'.
Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output.
Get more help at https://help.gradle.org
my react-native version is 0.57.3
react-native-cli: 2.0.1
it said cannot find openCV module but there is on your package
Thankyou in advance
Got two errors with your example while start it:
Error while updating property 'mounted' of a view managed of :FaceCamera
null
No implementation found for long org.opencv.face.LBPHFaceRecognizer.created_0(int,int,int,int,double) (tried Java_org_opencv_face_LBPHFaceRecognizer_create_10 and Java_org_opencv_face_LBPHFaceRecognizer_create_10__IIID)
updateViewProp
ViewManagersPropertyCache.java: 95
Any suggestion, Thanks
/home/shubham/Desktop/projects/FaceRecognition/android/app/build/generated/rncli/src/main/java/com/facebook/react/PackageList.java:18: error: cannot find symbol
import cv.reactnative.FaceModulePackage;
^
symbol: class FaceModulePackage
location: package cv.reactnative
/home/shubham/Desktop/projects/FaceRecognition/android/app/build/generated/rncli/src/main/java/com/facebook/react/PackageList.java:64: error: cannot find symbol
new FaceModulePackage()
^
symbol: class FaceModulePackage
location: class PackageList
2 errors
FAILURE: Build failed with an exception.
Compilation failed; see the compiler error output for details.
Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
Get more help at https://help.gradle.org
cant run, even with cloning this project
error: cannot find symbol
new FaceModulePackage()
methods :
clone example repo, npm install, copy v8a. cant run
Even though i managed to get It up and Running,
It says "OpenCv manager not found. Trying to install OpenCv"
its visibly seen throwing error
com.facebook.react.bridge.JSApplicationIllegalArgumentException: Error while updating property 'mounted' of a view managed by: FaceCamera
at com.facebook.react.uimanager.ViewManagersPropertyCache$PropSetter.updateViewProp(ViewManagersPropertyCache.java:98)
at com.facebook.react.uimanager.ViewManagerPropertyUpdater$FallbackViewManagerSetter.setProperty(ViewManagerPropertyUpdater.java:131)
at com.facebook.react.uimanager.ViewManagerPropertyUpdater.updateProps(ViewManagerPropertyUpdater.java:51)
at com.facebook.react.uimanager.ViewManager.updateProperties(ViewManager.java:46)
at com.facebook.react.uimanager.NativeViewHierarchyManager.updateProperties(NativeViewHierarchyManager.java:139)
at com.facebook.react.uimanager.UIViewOperationQueue$UpdatePropertiesOperation.execute(UIViewOperationQueue.java:93)
I am new to react native and there are many error occurs during running this app , can you please guide me step by step how to run this app ?
Describe the bug
A clear and concise description of what the bug is.
To Reproduce
Steps to reproduce the behavior:
Expected behavior
A clear and concise description of what you expected to happen.
Screenshots
If applicable, add screenshots to help explain your problem.
Smartphone (please complete the following information):
Additional context
Add any other context about the problem here.
Hi, the links provided for jniLibs files is expired. I'm unable to download them. App crashes on arm64_v8a cpu mobile phones
Can I use this library on the ios platform?
Is your feature request related to a problem? Please describe.
First of all, thanks for the amazing possibilities you opened by making this library.
My only question after reading your documentation is regarding the dataset itself.
I get a feeling that this is managed by you or your library currently using either your server or phone's storage.
Please correct me if my assumption was wrong :)
If I want to integrate my app with this feature to existing database, how would I go about it?
Describe the solution you'd like
It would be amazing if we can supply the dataset too to your library so that we could have integrated system/database for say a Guest Book app across multiple branches of the same company.
Please let me know! :)
Thanks again!
Faces matching.
How can I compare two faces and determine if it is the same person?
Face's dataset
I want to create and store face's dataset for authentication.
Is it possible ? How can I do it?
@assemmohamedali thanks for this module
how to solve this?
After Installation, while opening the App it is Getting crashed
Hi @assemmohamedali,
It would be possible to implement the following features:
hi!Is there any way to get rid of the line?
Hey, I am getting the above-mentioned error while running your example code,, can you help?
Adapt to rn0.6.0 +?
2.Package not found?
OpenCV Manager package was not found!Try to install it?
3.Error while updating property 'mounted' of a view managed of :FaceCamera
null
surroundings:
"react": "16.8.3",
"react-native": "0.59.9",
"react-native-facerecognition": "^3.1.0"
arm64-v7a
Hello. Can you send link for x86 CPU, x86_64 CPU, arm64-v8a CPU (react-native-facerecognition/android/app/src/main/jniLibs).
Please?
i've been trying to use this repo and make some update
but the problem i think is in NativeModules.Face
does NativeModules is still usable ?
Hi @assemmohamedali,
Take a look at this graphic that I made.
Let me know what you think.
import React, { Component } from 'react';
import {
Platform,
StyleSheet,
Text,
View,
TouchableOpacity,
ToastAndroid,
ListView,
TextInput,
TouchableHighlight,
DeviceEventEmitter,
StatusBar,
Image,
} from 'react-native';
import { RNCamera } from 'react-native-camera';
import Face from 'react-native-facerecognition';
import DialogManager, {
SlideAnimation,
DialogContent,
DialogTitle,
DialogButton,
} from 'react-native-dialog-component';
// Face training & recognition demo screen (prettified variant of the
// earlier example): icon buttons are overlaid on the RNCamera preview.
// Capture -> Face.Detect -> training dialogs; identification results
// arrive via DeviceEventEmitter events from the native module.
export default class App extends Component {
constructor(props) {
super(props);
// ListView is deprecated in modern React Native; this sample targets
// the RN 0.5x API surface where it is still available.
this.ds = new ListView.DataSource({
rowHasChanged: (r1, r2) => r1 !== r2,
});
const faces = [];
this.state = {
dataSource: this.ds.cloneWithRows(faces),
captured: 1,
faces: faces,
type: 'front',
};
}
// Subscribe to the native module's result events.
// NOTE(review): these listeners are never removed; if this screen can be
// unmounted, the subscriptions leak — confirm and add componentWillUnmount.
componentDidMount() {
StatusBar.setHidden(true);
DeviceEventEmitter.addListener(
'onFaceRecognized',
this.onFaceRecognized.bind(this)
);
DeviceEventEmitter.addListener('onClean', this.onClean.bind(this));
}
// NOTE(review): onMountError calls this.cameraError, but no such method is
// defined on this class — a camera mount failure will throw. Add a handler.
render() {
return (
<View style={styles.container}>
<RNCamera
ref={ref => {
this.camera = ref;
}}
style={styles.preview}
type={this.state.type}
permissionDialogTitle={'Permission to use camera'}
permissionDialogMessage={
'We need your permission to use your camera phone'
}
onMountError={msg => this.cameraError(msg)}
onCameraReady={() => {
this.onCameraReady();
}}>
<View
style={{
flex: 0,
flexDirection: 'row',
justifyContent: 'flex-end',
}}>
<TouchableOpacity
onPress={this.recognizePicture.bind(this)}
style={styles.capture}>
<Image
style={{ width: 35, height: 35 }}
source={{
uri:
'https://cdn3.iconfinder.com/data/icons/antivirus-internet-security-thin-colored-outline/33/facial_recognition-512.png',
}}
/>
</TouchableOpacity>
<TouchableOpacity
onPress={() =>
this.setState({
type: this.state.type === 'back' ? 'front' : 'back',
})
}
style={styles.capture}>
<Image
style={{ width: 35, height: 35 }}
source={{
uri:
'https://user-images.githubusercontent.com/20476002/43048582-2fa1b864-8dea-11e8-968a-2bb209302604.png',
}}
/>
</TouchableOpacity>
<TouchableOpacity
onPress={() => this.clean()}
style={styles.capture}>
<Image
style={{ width: 35, height: 35 }}
source={{
uri:
'https://cdn2.iconfinder.com/data/icons/basic-4/512/delete-512.png',
}}
/>
</TouchableOpacity>
</View>
<View style={{ flex: 1, justifyContent: 'flex-end' }}>
<TouchableOpacity
onPress={this.takePicture.bind(this)}
style={styles.capture}>
<Image
style={{ width: 35, height: 35 }}
source={{
uri:
'https://user-images.githubusercontent.com/20476002/43048598-67bed272-8dea-11e8-9736-a6ebb6e28362.png',
}}
/>
</TouchableOpacity>
</View>
</RNCamera>
</View>
);
}
// Fired by the native module on a successful match; `distance` is the
// recognizer's dissimilarity score (lower = closer).
onFaceRecognized(data) {
ToastAndroid.show(
'Recognized: ' + data.name + ' Distance: ' + data.distance,
ToastAndroid.LONG
);
}
// Fired by the native module after Face.Clean() completes.
onClean(msg) {
this.setState({ faces: [] });
ToastAndroid.show(msg, ToastAndroid.SHORT);
}
// Ask the native side to discard all trained faces ("onClean" follows).
clean() {
Face.Clean();
}
// Initialise the recognizer once the camera is ready (DEEP detection mode).
onCameraReady() {
Face.Start(
Face.Detection.DEEP,
success => {
ToastAndroid.show('Train initialized', ToastAndroid.SHORT);
},
error => {
ToastAndroid.show(error, ToastAndroid.LONG);
}
);
}
// Capture a frame, run detection, then open the training dialog flow with
// the captured base64 image stored in state.
takePicture = async function() {
if (this.camera) {
const options = { width: 200, base64: true };
const data = await this.camera.takePictureAsync(options);
Face.Detect(
data.base64,
detected => {
ToastAndroid.show(detected, ToastAndroid.SHORT);
this.setState({ image64: data.base64 });
this.onFaceDetect();
},
error => {
ToastAndroid.show(error, ToastAndroid.SHORT);
}
);
}
};
// Capture a frame, detect a face, then ask the native module to identify
// it; a successful match arrives via the "onFaceRecognized" event.
recognizePicture = async function() {
if (this.camera) {
const options = { width: 200, base64: true };
const data = await this.camera.takePictureAsync(options);
Face.Detect(
data.base64,
detected => {
ToastAndroid.show(detected, ToastAndroid.SHORT);
Face.Identify(data.base64, unrecognized => {
ToastAndroid.show(unrecognized, ToastAndroid.SHORT);
});
},
error => {
ToastAndroid.show(error, ToastAndroid.SHORT);
}
);
console.log(data.uri);
}
};
// After a detection: train immediately when no faces exist yet, otherwise
// let the user add a capture to an existing face or create a new one.
onFaceDetect() {
if (this.state.faces.length == 0) this.newFaceDetected();
else {
DialogManager.show(
{
title: 'Trained Faces',
titleAlign: 'center',
haveOverlay: false,
animationDuration: 200,
SlideAnimation: new SlideAnimation({ slideFrom: 'top' }),
children: (
<DialogContent>
<View>
<ListView
dataSource={this.state.dataSource}
renderRow={this.renderRow.bind(this)}
/>
</View>
<DialogButton
text="Close"
align="right"
onPress={() => DialogManager.dismiss()}
/>
<DialogButton
text="New Face"
align="right"
onPress={() => this.newFaceDetected()}
/>
</DialogContent>
),
},
() => {
console.log('callback - show');
}
);
}
}
// Prompt for a name for a brand-new face, then hand off to newFaceImage.
newFaceDetected() {
DialogManager.show(
{
title: 'Train Face',
titleAlign: 'center',
haveOverlay: false,
animationDuration: 200,
SlideAnimation: new SlideAnimation({ slideFrom: 'top' }),
children: (
<DialogContent>
<View>
<TextInput
placeholder="face name"
onChangeText={Fname => this.setState({ Fname })}
/>
</View>
<DialogButton text="Save" onPress={() => this.newFaceImage()} />
</DialogContent>
),
},
() => {
console.log('callback - show');
}
);
}
// Register the captured image under the entered name and refresh the list.
newFaceImage() {
const faces = [
...this.state.faces,
{ Fname: this.state.Fname, captured: this.state.captured },
];
const images = { image64: this.state.image64, Fname: this.state.Fname };
Face.Training(images, result => alert(result), err => alert(err));
this.setState({ dataSource: this.ds.cloneWithRows(faces), faces });
DialogManager.dismissAll();
}
// Add one more training capture to an existing face (max 5 per face).
saveCaptureImage(faceData) {
if (faceData.captured == 5)
ToastAndroid.show('More photos are not allowed', ToastAndroid.SHORT);
else {
const slice = this.state.faces.slice();
// map() used here purely for its side effect (increments the match).
slice.map(face => {
if (face.Fname == faceData.Fname) face.captured++;
});
this.setState({ dataSource: this.ds.cloneWithRows(slice) });
const images = { image64: this.state.image64, Fname: faceData.Fname };
Face.Training(images, result => alert(result), err => alert(err));
}
DialogManager.dismiss();
}
// One row of the "Trained Faces" dialog: capture count + face name;
// tapping it trains another capture for that face.
renderRow(rowData) {
return (
<TouchableHighlight
onPress={() => this.saveCaptureImage(rowData)}
underlayColor="transparent">
<View
style={{
flex: 1,
flexDirection: 'row',
padding: 15,
alignItems: 'center',
borderColor: '#D7D7D7',
borderBottomWidth: 1,
}}>
<Text style={{ fontSize: 16 }}>{rowData.captured}</Text>
<View style={{ paddingLeft: 20 }}>
<Text style={{ fontSize: 18 }}>{rowData.Fname}</Text>
</View>
</View>
</TouchableHighlight>
);
}
}
// Screen styles: full-screen camera preview with icon buttons overlaid.
const styles = StyleSheet.create({
container: {
flex: 1,
flexDirection: 'column',
backgroundColor: 'white',
},
// Camera fills the screen; the layout props from the earlier variant are
// intentionally disabled because buttons now live inside the preview.
preview: {
flex: 1,
//justifyContent: 'flex-end',
//alignItems: 'center',
},
// Transparent touch target around each overlay icon.
capture: {
flex: 0,
//backgroundColor: '#D7D7D7',
//borderRadius: 5,
padding: 10,
//paddingHorizontal: 10,
alignSelf: 'center',
margin: 10,
},
});
A declarative, efficient, and flexible JavaScript library for building user interfaces.
🖖 Vue.js is a progressive, incrementally-adoptable JavaScript framework for building UI on the web.
TypeScript is a superset of JavaScript that compiles to clean JavaScript output.
An Open Source Machine Learning Framework for Everyone
The Web framework for perfectionists with deadlines.
A PHP framework for web artisans
Bring data to life with SVG, Canvas and HTML. 📊📈🎉
JavaScript (JS) is a lightweight interpreted programming language with first-class functions.
Some thing interesting about web. New door for the world.
A server is a program made to process requests and deliver data to clients.
Machine learning is a way of modeling and interpreting data that allows a piece of software to respond intelligently.
Some thing interesting about visualization, use data art
Some thing interesting about game, make everyone happy.
We are working to build community through open source technology. NB: members must have two-factor auth.
Open source projects and samples from Microsoft.
Google ❤️ Open Source for everyone.
Alibaba Open Source for everyone
Data-Driven Documents codes.
China tencent open source team.