Add support for multiface mode, appearance identifiers, emoji, and landscape mode.

This commit is contained in:
Abraham Hedtke 2015-12-23 01:10:49 -05:00 committed by toby cabot
parent bf440ea9c0
commit e582107431
69 changed files with 1492 additions and 918 deletions

View file

@ -15,4 +15,6 @@ In order to use this project, you will need to:
See the comment section at the top of the MainActivity.java file for more information.
Copyright (c) 2014-2015 Affectiva. All rights reserved.
***
This app uses some of the excellent [Emoji One emojis](http://emojione.com).

View file

@ -1,19 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module external.linked.project.id="affdexme-android" external.linked.project.path="$MODULE_DIR$" external.root.project.path="$MODULE_DIR$" external.system.id="GRADLE" external.system.module.group="" external.system.module.version="unspecified" type="JAVA_MODULE" version="4">
<component name="FacetManager">
<facet type="java-gradle" name="Java-Gradle">
<configuration>
<option name="BUILD_FOLDER_PATH" value="$MODULE_DIR$/build" />
<option name="BUILDABLE" value="false" />
</configuration>
</facet>
</component>
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_7" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/.gradle" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

2
app/.gitignore vendored
View file

@ -3,6 +3,8 @@
/jniLibs/armeabi-v7a
/libs/Affdex-sdk.jar
/libs/Affdex-sdk-javadoc.jar
/libs/javax.inject-1.jar
/libs/dagger-1.2.2.jar
/src/main/assets/Affdex/*license*
/src/main/assets/Affdex/Classifiers
/src/main/assets/Affdex/Classifiers/v_9

View file

@ -1,93 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module external.linked.project.id=":app" external.linked.project.path="$MODULE_DIR$" external.root.project.path="$MODULE_DIR$/.." external.system.id="GRADLE" external.system.module.group="affdexme-android" external.system.module.version="unspecified" type="JAVA_MODULE" version="4">
<component name="FacetManager">
<facet type="android-gradle" name="Android-Gradle">
<configuration>
<option name="GRADLE_PROJECT_PATH" value=":app" />
</configuration>
</facet>
<facet type="android" name="Android">
<configuration>
<option name="SELECTED_BUILD_VARIANT" value="debug" />
<option name="SELECTED_TEST_ARTIFACT" value="_android_test_" />
<option name="ASSEMBLE_TASK_NAME" value="assembleDebug" />
<option name="COMPILE_JAVA_TASK_NAME" value="compileDebugSources" />
<option name="ASSEMBLE_TEST_TASK_NAME" value="assembleDebugAndroidTest" />
<option name="COMPILE_JAVA_TEST_TASK_NAME" value="compileDebugAndroidTestSources" />
<afterSyncTasks>
<task>generateDebugAndroidTestSources</task>
<task>generateDebugSources</task>
</afterSyncTasks>
<option name="ALLOW_USER_CONFIGURATION" value="false" />
<option name="MANIFEST_FILE_RELATIVE_PATH" value="/src/main/AndroidManifest.xml" />
<option name="RES_FOLDER_RELATIVE_PATH" value="/src/main/res" />
<option name="RES_FOLDERS_RELATIVE_PATH" value="file://$MODULE_DIR$/src/main/res" />
<option name="ASSETS_FOLDER_RELATIVE_PATH" value="/src/main/assets" />
</configuration>
</facet>
</component>
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_7" inherit-compiler-output="false">
<output url="file://$MODULE_DIR$/build/intermediates/classes/debug" />
<output-test url="file://$MODULE_DIR$/build/intermediates/classes/androidTest/debug" />
<exclude-output />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/r/debug" isTestSource="false" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/aidl/debug" isTestSource="false" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/buildConfig/debug" isTestSource="false" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/rs/debug" isTestSource="false" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/res/rs/debug" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/res/resValues/debug" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/r/androidTest/debug" isTestSource="true" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/aidl/androidTest/debug" isTestSource="true" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/buildConfig/androidTest/debug" isTestSource="true" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/source/rs/androidTest/debug" isTestSource="true" generated="true" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/res/rs/androidTest/debug" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/build/generated/res/resValues/androidTest/debug" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/debug/res" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/debug/resources" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/debug/assets" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/debug/aidl" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/debug/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/debug/jni" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/debug/rs" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/main/res" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/main/resources" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/main/assets" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/main/aidl" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/main/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/main/rs" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/res" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/resources" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/assets" type="java-test-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/aidl" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/java" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/jni" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/src/androidTest/rs" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/assets" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/blame" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/classes" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/dependency-cache" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/exploded-aar/com.android.support/appcompat-v7/23.1.1/jars" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/exploded-aar/com.android.support/support-v4/23.1.1/jars" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/incremental" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/jniLibs" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/manifests" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/pre-dexed" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/res" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/rs" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/symbols" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/transforms" />
<excludeFolder url="file://$MODULE_DIR$/build/outputs" />
<excludeFolder url="file://$MODULE_DIR$/build/tmp" />
</content>
<orderEntry type="jdk" jdkName="Android API 23 Platform" jdkType="Android SDK" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" exported="" name="support-annotations-23.1.1" level="project" />
<orderEntry type="library" exported="" name="support-v4-23.1.1" level="project" />
<orderEntry type="library" exported="" name="appcompat-v7-23.1.1" level="project" />
<orderEntry type="library" exported="" name="dagger-1.2.2" level="project" />
<orderEntry type="library" exported="" name="Affdex-sdk" level="project" />
<orderEntry type="library" exported="" name="javax.inject-1" level="project" />
<orderEntry type="library" exported="" name="Affdex-sdk-javadoc" level="project" />
</component>
</module>

View file

@ -36,25 +36,12 @@ android {
}
dependencies {
//include the Affdex SDK jars
compile files('libs/Affdex-sdk.jar')
compile files('libs/Affdex-sdk-javadoc.jar')
//include the Affdex SDK jars and its dependencies
compile fileTree(dir: 'libs', include: '*.jar')
//include dependencies
//include project dependencies
compile 'com.android.support:support-v4:23.1.1'
compile 'com.android.support:appcompat-v7:23.1.1'
compile 'com.squareup.dagger:dagger:1.2.2'
compile 'javax.inject:javax.inject:1'
//although the use of the CameraDetector class in this project does not require it, you may have to include
//the following dependencies if you use other features of the Affdex SDK
// compile 'com.google.code.gson:gson:2.4'
// compile 'com.android.support:support-v13:23.1.1'
// compile 'com.google.android.gms:play-services:8.4.0'
// compile 'com.google.android.gms:play-services-ads:8.4.0'
// compile 'com.google.android.gms:play-services-identity:8.4.0'
// compile 'com.google.android.gms:play-services-gcm:8.4.0'
// compile files('libs/flurry-analytics-5.0.0.jar')
}
// build a signed release apk only if the environment is configured

View file

@ -1,13 +1,18 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.affectiva.affdexme" >
package="com.affectiva.affdexme">
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="false"/>
<uses-feature
android:name="android.hardware.camera"
android:required="false" />
<uses-feature
android:name="android.hardware.camera.autofocus"
android:required="false" />
<uses-feature
android:name="android.hardware.camera.front"
android:required="false" />
<application
android:name="com.affectiva.errorreporting.CustomApplication"
@ -18,9 +23,8 @@
<activity
android:name=".MainActivity"
android:configChanges="screenSize|keyboardHidden|orientation"
android:screenOrientation="sensorPortrait"
android:theme="@style/MainActivityTheme"
android:label="@string/app_name" >
android:label="@string/app_name">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
@ -34,7 +38,6 @@
android:name="android.support.PARENT_ACTIVITY"
android:value="com.affectiva.affdexme.MainActivity" />
</activity>
<activity
android:name="com.affectiva.errorreporting.ErrorReporter"
android:theme="@android:style/Theme.DeviceDefault"
@ -44,7 +47,5 @@
<category android:name="android.intent.category.DEFAULT" />
</intent-filter>
</activity>
</application>
</manifest>

View file

@ -2,22 +2,32 @@ package com.affectiva.affdexme;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.graphics.PointF;
import android.graphics.PorterDuff;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.os.Process;
import android.os.SystemClock;
import android.support.annotation.NonNull;
import android.util.AttributeSet;
import android.util.Log;
import android.util.Pair;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import com.affectiva.android.affdex.sdk.detector.Face;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
/**
* This class contains a SurfaceView and its own thread that draws to it.
@ -25,310 +35,108 @@ import com.affectiva.android.affdex.sdk.detector.Face;
*/
public class DrawingView extends SurfaceView implements SurfaceHolder.Callback {
class PointFArraySharer {
boolean isPointsMirrored = false;
PointF[] nextPointsToDraw = null;
}
//Inner Thread class
class DrawingThread extends Thread{
private SurfaceHolder mSurfaceHolder;
private Paint circlePaint;
private Paint boxPaint;
private volatile boolean stopFlag = false; //boolean to indicate when thread has been told to stop
private final PointFArraySharer sharer;
private DrawingViewConfig config;
private final long drawPeriod = 33; //draw at 30 fps
private final int TEXT_RAISE = 10;
String roll = "";
String yaw = "";
String pitch = "";
String interOcDis = "";
public DrawingThread(SurfaceHolder surfaceHolder, DrawingViewConfig con) {
mSurfaceHolder = surfaceHolder;
circlePaint = new Paint();
circlePaint.setColor(Color.WHITE);
boxPaint = new Paint();
boxPaint.setColor(Color.WHITE);
boxPaint.setStyle(Paint.Style.STROKE);
config = con;
sharer = new PointFArraySharer();
setThickness(config.drawThickness);
}
void setMetrics(float roll, float yaw, float pitch, float interOcDis, float valence) {
//format string for our DrawingView to use when ready
this.roll = String.format("%.2f",roll);
this.yaw = String.format("%.2f",yaw);
this.pitch = String.format("%.2f",pitch);
this.interOcDis = String.format("%.2f",interOcDis);
//prepare the color of the bounding box using the valence score. Red for -100, White for 0, and Green for +100, with linear interpolation in between.
if (valence > 0) {
float colorScore = ((100f-valence)/100f)*255;
boxPaint.setColor(Color.rgb((int)colorScore,255,(int)colorScore));
} else {
float colorScore = ((100f+valence)/100f)*255;
boxPaint.setColor(Color.rgb(255, (int) colorScore, (int) colorScore));
}
}
public void stopThread() {
stopFlag = true;
}
public boolean isStopped() {
return stopFlag;
}
//Updates thread with latest points returned by the onImageResults() event.
public void updatePoints(PointF[] pointList, boolean isPointsMirrored) {
synchronized (sharer) {
sharer.nextPointsToDraw = pointList;
sharer.isPointsMirrored = isPointsMirrored;
}
}
void setThickness(int thickness) {
boxPaint.setStrokeWidth(thickness);
}
//Inform thread face detection has stopped, so array of points is no longer valid.
public void invalidatePoints() {
synchronized (sharer) {
sharer.nextPointsToDraw = null;
}
}
@Override
public void run() {
android.os.Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
while(!stopFlag) {
/**
* We use SurfaceHolder.lockCanvas() to get the canvas that draws to the SurfaceView.
* After we are done drawing, we let go of the canvas using SurfaceHolder.unlockCanvasAndPost()
* **/
Canvas c = null;
try {
c = mSurfaceHolder.lockCanvas();
if (c!= null) {
synchronized (mSurfaceHolder) {
c.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR); //clear previous dots
draw(c);
}
}
}
finally {
if (c!= null) {
mSurfaceHolder.unlockCanvasAndPost(c);
}
}
}
config = null; //nullify object to avoid memory leak
}
void draw(Canvas c) {
PointF[] points;
boolean mirrorPoints;
synchronized (sharer) {
if (sharer.nextPointsToDraw == null)
return;
points = sharer.nextPointsToDraw;
mirrorPoints = sharer.isPointsMirrored;
}
//Coordinates around which to draw bounding box.
float leftBx = config.surfaceViewWidth;
float rightBx = 0;
float topBx = config.surfaceViewHeight;
float botBx = 0;
for (int i = 0; i < points.length; i++) {
//transform from the camera coordinates to our screen coordinates
//The camera preview is displayed as a mirror, so X pts have to be mirrored back.
float x;
if (mirrorPoints) {
x = (config.imageWidth - points[i].x) * config.screenToImageRatio;
} else {
x = (points[i].x) * config.screenToImageRatio;
}
float y = (points[i].y)* config.screenToImageRatio;
//We determine the left-most, top-most, right-most, and bottom-most points to draw the bounding box around.
if (x < leftBx)
leftBx = x;
if (x > rightBx)
rightBx = x;
if (y < topBx)
topBx = y;
if (y > botBx)
botBx = y;
//Draw facial tracking dots.
if (config.isDrawPointsEnabled) {
c.drawCircle(x, y, config.drawThickness, circlePaint);
}
}
//Draw the bounding box.
if (config.isDrawPointsEnabled) {
c.drawRect(leftBx, topBx, rightBx, botBx, boxPaint);
}
//Draw the measurement metrics, with a dark border around the words to make them visible for users of all skin colors.
if (config.isDrawMeasurementsEnabled) {
float centerBx = (leftBx + rightBx) / 2;
float upperText = topBx - TEXT_RAISE;
c.drawText("PITCH", centerBx, upperText - config.textSize,config.textBorderPaint);
c.drawText("PITCH", centerBx, upperText - config.textSize, config.textPaint);
c.drawText(pitch,centerBx ,upperText ,config.textBorderPaint);
c.drawText(pitch, centerBx, upperText, config.textPaint);
float upperLeft = centerBx - config.upperTextSpacing;
c.drawText("YAW", upperLeft , upperText - config.textSize , config.textBorderPaint);
c.drawText("YAW", upperLeft, upperText - config.textSize, config.textPaint);
c.drawText(yaw, upperLeft , upperText , config.textBorderPaint);
c.drawText(yaw, upperLeft, upperText, config.textPaint);
float upperRight = centerBx + config.upperTextSpacing;
c.drawText("ROLL", upperRight , upperText - config.textSize , config.textBorderPaint);
c.drawText("ROLL", upperRight, upperText - config.textSize, config.textPaint);
c.drawText(roll, upperRight , upperText , config.textBorderPaint);
c.drawText(roll, upperRight, upperText, config.textPaint);
c.drawText("INTEROCULAR DISTANCE", centerBx , botBx + config.textSize , config.textBorderPaint);
c.drawText("INTEROCULAR DISTANCE", centerBx, botBx + config.textSize, config.textPaint);
c.drawText(interOcDis,centerBx , botBx + config.textSize*2 , config.textBorderPaint);
c.drawText(interOcDis, centerBx, botBx + config.textSize * 2, config.textPaint);
}
}
}
class DrawingViewConfig {
private int imageHeight = 1;
private int imageWidth = 1;
private int surfaceViewWidth = 0;
private int surfaceViewHeight = 0;
private float screenToImageRatio = 0;
private int drawThickness = 0;
private boolean isDrawPointsEnabled = true; //by default, have the drawing thread draw tracking dots
private boolean isDrawMeasurementsEnabled = false;
private boolean isDimensionsNeeded = true;
private Paint textPaint;
private int textSize;
private Paint textBorderPaint;
private int upperTextSpacing;
public void setMeasurementMetricConfigs(Paint textPaint, Paint dropShadowPaint, int textSize, int upperTextSpacing) {
this.textPaint = textPaint;
this.textSize = textSize;
this.textBorderPaint = dropShadowPaint;
this.upperTextSpacing = upperTextSpacing;
}
public void updateViewDimensions(int surfaceViewWidth, int surfaceViewHeight, int imageWidth, int imageHeight) {
if (surfaceViewWidth <= 0 || surfaceViewHeight <= 0 || imageWidth <= 0 || imageHeight <= 0) {
throw new IllegalArgumentException("All dimensions submitted to updateViewDimensions() must be positive");
}
this.imageWidth = imageWidth;
this.imageHeight = imageHeight;
this.surfaceViewWidth = surfaceViewWidth;
this.surfaceViewHeight = surfaceViewHeight;
screenToImageRatio = (float)surfaceViewWidth / imageWidth;
isDimensionsNeeded = false;
}
public void setDrawThickness(int t) {
if ( t <= 0) {
throw new IllegalArgumentException("Thickness must be positive.");
}
drawThickness = t;
}
}
//Class variables of DrawingView class
private final static String LOG_TAG = "AffdexMe";
private final float MARGIN = 4;
private Bitmap appearanceMarkerBitmap_genderMale_glassesOn;
private Bitmap appearanceMarkerBitmap_genderFemale_glassesOn;
private Bitmap appearanceMarkerBitmap_genderUnknown_glassesOn;
private Bitmap appearanceMarkerBitmap_genderUnknown_glassesOff;
private Bitmap appearanceMarkerBitmap_genderMale_glassesOff;
private Bitmap appearanceMarkerBitmap_genderFemale_glassesOff;
private Map<String, Bitmap> emojiMarkerBitmapToEmojiTypeMap;
private SurfaceHolder surfaceHolder;
private DrawingThread drawingThread; //DrawingThread object
private Typeface typeface;
private DrawingViewConfig drawingViewConfig;
private static String LOG_TAG = "AffdexMe";
//three constructors required of any custom view
public DrawingView(Context context) {
super(context);
initView(context, null);
}
public DrawingView(Context context, AttributeSet attrs) {
super(context, attrs);
initView(context, attrs);
}
public DrawingView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
initView(context, attrs);
initView();
}
void initView(Context context, AttributeSet attrs){
public DrawingView(Context context, AttributeSet attrs) {
super(context, attrs);
initView();
}
public DrawingView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
initView();
}
private static int getDrawable(@NonNull Context context, @NonNull String name) {
return context.getResources().getIdentifier(name, "drawable", context.getPackageName());
}
void initView() {
surfaceHolder = getHolder(); //The SurfaceHolder object will be used by the thread to request canvas to draw on SurfaceView
surfaceHolder.setFormat(PixelFormat.TRANSPARENT); //set to Transparent so this surfaceView does not obscure the one it is overlaying (the one displaying the camera).
surfaceHolder.addCallback(this); //become a Listener to the three events below that SurfaceView throws
surfaceHolder.addCallback(this); //become a Listener to the three events below that SurfaceView generates
drawingViewConfig = new DrawingViewConfig();
//default values
int upperTextSpacing = 15;
int textSize = 15;
//Default values
Paint emotionLabelPaint = new Paint();
emotionLabelPaint.setColor(Color.parseColor("#ff8000")); //Orange
emotionLabelPaint.setStyle(Paint.Style.FILL);
emotionLabelPaint.setTextAlign(Paint.Align.CENTER);
emotionLabelPaint.setTextSize(48);
Paint measurementTextPaint = new Paint();
measurementTextPaint.setStyle(Paint.Style.FILL);
measurementTextPaint.setTextAlign(Paint.Align.CENTER);
Paint emotionValuePaint = new Paint();
emotionValuePaint.setColor(Color.parseColor("#514a40")); //Grey
emotionValuePaint.setStyle(Paint.Style.FILL);
emotionValuePaint.setTextAlign(Paint.Align.CENTER);
emotionValuePaint.setTextSize(48);
Paint dropShadow = new Paint();
dropShadow.setColor(Color.BLACK);
dropShadow.setStyle(Paint.Style.STROKE);
dropShadow.setTextAlign(Paint.Align.CENTER);
Paint metricBarPaint = new Paint();
metricBarPaint.setColor(Color.GREEN);
metricBarPaint.setStyle(Paint.Style.FILL);
int metricBarWidth = 150;
//load and parse XML attributes
if (attrs != null) {
TypedArray a = getContext().obtainStyledAttributes(attrs,R.styleable.drawing_view_attributes,0,0);
upperTextSpacing = a.getDimensionPixelSize(R.styleable.drawing_view_attributes_measurements_upper_spacing,upperTextSpacing);
measurementTextPaint.setColor(a.getColor(R.styleable.drawing_view_attributes_measurements_color,Color.WHITE));
dropShadow.setColor(a.getColor(R.styleable.drawing_view_attributes_measurements_text_border_color,Color.BLACK));
dropShadow.setStrokeWidth(a.getInteger(R.styleable.drawing_view_attributes_measurements_text_border_thickness,5));
textSize = a.getDimensionPixelSize(R.styleable.drawing_view_attributes_measurements_text_size,textSize);
measurementTextPaint.setTextSize(textSize);
dropShadow.setTextSize(textSize);
int[] emotionLabelAttrs = {
android.R.attr.textStyle, // 0
android.R.attr.textColor, // 1
android.R.attr.shadowColor, // 2
android.R.attr.shadowDy, // 3
android.R.attr.shadowRadius, // 4
android.R.attr.layout_weight, // 5
android.R.attr.textSize}; // 6
TypedArray a = getContext().obtainStyledAttributes(R.style.metricName, emotionLabelAttrs);
if (a != null) {
emotionLabelPaint.setColor(a.getColor(1, emotionLabelPaint.getColor()));
emotionLabelPaint.setShadowLayer(
a.getFloat(4, 1.0f),
a.getFloat(3, 2.0f), a.getFloat(3, 2.0f),
a.getColor(2, Color.BLACK));
emotionLabelPaint.setTextSize(a.getDimensionPixelSize(6, 48));
emotionLabelPaint.setFakeBoldText("bold".equalsIgnoreCase(a.getString(0)));
a.recycle();
}
drawingViewConfig.setMeasurementMetricConfigs(measurementTextPaint, dropShadow, textSize, upperTextSpacing);
int[] emotionValueAttrs = {
android.R.attr.textColor, // 0
android.R.attr.textSize, // 1
R.styleable.custom_attributes_metricBarLength}; // 2
a = getContext().obtainStyledAttributes(R.style.metricPct, emotionValueAttrs);
if (a != null) {
emotionValuePaint.setColor(a.getColor(0, emotionValuePaint.getColor()));
emotionValuePaint.setTextSize(a.getDimensionPixelSize(1, 36));
metricBarWidth = a.getDimensionPixelSize(2, 150);
a.recycle();
}
drawingViewConfig.setDominantEmotionLabelPaints(emotionLabelPaint, emotionValuePaint);
drawingViewConfig.setDominantEmotionMetricBarConfig(metricBarPaint, metricBarWidth);
drawingThread = new DrawingThread(surfaceHolder, drawingViewConfig);
//statically load the emoji bitmaps on-demand and cache
emojiMarkerBitmapToEmojiTypeMap = new HashMap<>();
}
public void setTypeface(Typeface face) {
drawingViewConfig.textPaint.setTypeface(face);
drawingViewConfig.textBorderPaint.setTypeface(face);
drawingViewConfig.dominantEmotionLabelPaint.setTypeface(face);
drawingViewConfig.dominantEmotionValuePaint.setTypeface(face);
}
@Override
@ -353,9 +161,10 @@ public class DrawingView extends SurfaceView implements SurfaceHolder.Callback {
drawingThread.join();
retry = false;
} catch (InterruptedException e) {
Log.e(LOG_TAG,e.getMessage());
Log.e(LOG_TAG, e.getMessage());
}
}
cleanup();
}
public boolean isDimensionsNeeded() {
@ -368,50 +177,549 @@ public class DrawingView extends SurfaceView implements SurfaceHolder.Callback {
public void updateViewDimensions(int surfaceViewWidth, int surfaceViewHeight, int imageWidth, int imageHeight) {
try {
drawingViewConfig.updateViewDimensions(surfaceViewWidth,surfaceViewHeight,imageWidth,imageHeight);
} catch (Exception e) {
Log.e(LOG_TAG,e.getMessage());
drawingViewConfig.updateViewDimensions(surfaceViewWidth, surfaceViewHeight, imageWidth, imageHeight);
} catch (IllegalArgumentException e) {
Log.e(LOG_TAG, "Attempted to set a dimension with a negative value", e);
}
}
public void setThickness(int t) {
drawingViewConfig.setDrawThickness(t);
try {
drawingViewConfig.setDrawThickness(t);
drawingThread.setThickness(t);
} catch(Exception e) {
Log.e(LOG_TAG,e.getMessage());
} catch (IllegalArgumentException e) {
Log.e(LOG_TAG, "Attempted to set a thickness with a negative value", e);
}
}
public void setDrawPointsEnabled(boolean b){
drawingViewConfig.isDrawPointsEnabled = b;
}
public boolean getDrawPointsEnabled() {
return drawingViewConfig.isDrawPointsEnabled;
}
public void setDrawMeasurementsEnabled(boolean b) {
drawingViewConfig.isDrawMeasurementsEnabled = b;
public void setDrawPointsEnabled(boolean b) {
drawingViewConfig.isDrawPointsEnabled = b;
}
public boolean getDrawMeasurementsEnabled() {
return drawingViewConfig.isDrawMeasurementsEnabled;
public boolean getDrawAppearanceMarkersEnabled() {
return drawingViewConfig.isDrawAppearanceMarkersEnabled;
}
public void setMetrics(float roll, float yaw, float pitch, float interOcDis, float valence) {
drawingThread.setMetrics(roll,yaw,pitch,interOcDis,valence);
public void setDrawAppearanceMarkersEnabled(boolean b) {
drawingViewConfig.isDrawAppearanceMarkersEnabled = b;
}
public void updatePoints(PointF[] points, boolean isPointsMirrored) {
drawingThread.updatePoints(points, isPointsMirrored);
public boolean getDrawEmojiMarkersEnabled() {
return drawingViewConfig.isDrawEmojiMarkersEnabled;
}
public void invalidatePoints(){
public void setDrawEmojiMarkersEnabled(boolean b) {
drawingViewConfig.isDrawEmojiMarkersEnabled = b;
}
public void updatePoints(List<Face> faces, boolean isPointsMirrored) {
drawingThread.updatePoints(faces, isPointsMirrored);
}
public void invalidatePoints() {
drawingThread.invalidatePoints();
}
/**
* To be called when this view element is potentially being destroyed
* I.E. when the Activity's onPause() gets called.
*/
public void cleanup() {
if (emojiMarkerBitmapToEmojiTypeMap != null) {
for (Bitmap bitmap : emojiMarkerBitmapToEmojiTypeMap.values()) {
bitmap.recycle();
}
emojiMarkerBitmapToEmojiTypeMap.clear();
}
if (appearanceMarkerBitmap_genderMale_glassesOn != null) {
appearanceMarkerBitmap_genderMale_glassesOn.recycle();
}
if (appearanceMarkerBitmap_genderFemale_glassesOn != null) {
appearanceMarkerBitmap_genderFemale_glassesOn.recycle();
}
if (appearanceMarkerBitmap_genderUnknown_glassesOn != null) {
appearanceMarkerBitmap_genderUnknown_glassesOn.recycle();
}
if (appearanceMarkerBitmap_genderUnknown_glassesOff != null) {
appearanceMarkerBitmap_genderUnknown_glassesOff.recycle();
}
if (appearanceMarkerBitmap_genderMale_glassesOff != null) {
appearanceMarkerBitmap_genderMale_glassesOff.recycle();
}
if (appearanceMarkerBitmap_genderFemale_glassesOff != null) {
appearanceMarkerBitmap_genderFemale_glassesOff.recycle();
}
}
class FacesSharer {
boolean isPointsMirrored;
List<Face> facesToDraw;
public FacesSharer() {
isPointsMirrored = false;
facesToDraw = new ArrayList<>();
}
}
//Inner Thread class
class DrawingThread extends Thread {
private final FacesSharer sharer;
private final SurfaceHolder mSurfaceHolder;
private Paint trackingPointsPaint;
private Paint boundingBoxPaint;
private Paint dominantEmotionScoreBarPaint;
private volatile boolean stopFlag = false; //boolean to indicate when thread has been told to stop
private DrawingViewConfig config;
public DrawingThread(SurfaceHolder surfaceHolder, DrawingViewConfig con) {
mSurfaceHolder = surfaceHolder;
//statically load the Appearance marker bitmaps so they only have to load once
appearanceMarkerBitmap_genderMale_glassesOn = ImageHelper.loadBitmapFromInternalStorage(getContext(), "male_glasses.png");
appearanceMarkerBitmap_genderMale_glassesOff = ImageHelper.loadBitmapFromInternalStorage(getContext(), "male_noglasses.png");
appearanceMarkerBitmap_genderFemale_glassesOn = ImageHelper.loadBitmapFromInternalStorage(getContext(), "female_glasses.png");
appearanceMarkerBitmap_genderFemale_glassesOff = ImageHelper.loadBitmapFromInternalStorage(getContext(), "female_noglasses.png");
appearanceMarkerBitmap_genderUnknown_glassesOn = ImageHelper.loadBitmapFromInternalStorage(getContext(), "unknown_glasses.png");
appearanceMarkerBitmap_genderUnknown_glassesOff = ImageHelper.loadBitmapFromInternalStorage(getContext(), "unknown_noglasses.png");
trackingPointsPaint = new Paint();
trackingPointsPaint.setColor(Color.WHITE);
boundingBoxPaint = new Paint();
boundingBoxPaint.setColor(Color.WHITE);
boundingBoxPaint.setStyle(Paint.Style.STROKE);
dominantEmotionScoreBarPaint = new Paint();
dominantEmotionScoreBarPaint.setColor(Color.GREEN);
dominantEmotionScoreBarPaint.setStyle(Paint.Style.STROKE);
config = con;
sharer = new FacesSharer();
setThickness(config.drawThickness);
}
void setValenceOfBoundingBox(float valence) {
//prepare the color of the bounding box using the valence score. Red for -100, White for 0, and Green for +100, with linear interpolation in between.
if (valence > 0) {
float colorScore = ((100f - valence) / 100f) * 255;
boundingBoxPaint.setColor(Color.rgb((int) colorScore, 255, (int) colorScore));
} else {
float colorScore = ((100f + valence) / 100f) * 255;
boundingBoxPaint.setColor(Color.rgb(255, (int) colorScore, (int) colorScore));
}
}
public void stopThread() {
stopFlag = true;
}
public boolean isStopped() {
return stopFlag;
}
//Updates thread with latest faces returned by the onImageResults() event.
public void updatePoints(List<Face> faces, boolean isPointsMirrored) {
synchronized (sharer) {
sharer.facesToDraw.clear();
if (faces != null) {
sharer.facesToDraw.addAll(faces);
}
sharer.isPointsMirrored = isPointsMirrored;
}
}
// Applies the stroke width used for the bounding box outline.
// NOTE(review): unlike DrawingViewConfig.setDrawThickness(), this does not
// reject non-positive values -- confirm callers always pass a validated value.
void setThickness(int thickness) {
    boundingBoxPaint.setStrokeWidth(thickness);
}
//Inform thread face detection has stopped, so pending faces are no longer valid.
//Clearing under the sharer lock guarantees the drawing loop stops painting
//stale faces on its next pass.
public void invalidatePoints() {
    synchronized (sharer) {
        sharer.facesToDraw.clear();
    }
}
/**
 * Drawing loop: repeatedly locks the SurfaceView's canvas, clears it, draws the
 * current faces, and posts the canvas back, until stopThread() is called.
 */
@Override
public void run() {
    // Rendering is not latency-critical; yield to the UI and detector threads.
    Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
    while (!stopFlag) {
        /**
         * We use SurfaceHolder.lockCanvas() to get the canvas that draws to the SurfaceView.
         * After we are done drawing, we let go of the canvas using SurfaceHolder.unlockCanvasAndPost()
         * **/
        Canvas c = null;
        try {
            c = mSurfaceHolder.lockCanvas();
            if (c != null) {
                synchronized (mSurfaceHolder) {
                    c.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR); //clear previous dots
                    draw(c);
                }
            }
        } finally {
            // Always post the canvas back, even if draw() throws; a canvas left
            // locked would wedge the surface.
            if (c != null) {
                mSurfaceHolder.unlockCanvasAndPost(c);
            }
        }
    }
    config = null; //nullify object to avoid memory leak
}
/**
 * Paints all pending faces onto the canvas. Faces are fetched from the shared
 * holder one at a time so the lock on sharer is held only briefly, never across
 * the (potentially slow) per-face drawing work.
 */
void draw(Canvas c) {
    Face nextFaceToDraw;
    boolean mirrorPoints;
    boolean multiFaceMode;
    int index = 0;
    synchronized (sharer) {
        mirrorPoints = sharer.isPointsMirrored;
        // Multi-face mode is inferred per-frame from the number of faces present.
        multiFaceMode = sharer.facesToDraw.size() > 1;
        if (sharer.facesToDraw.isEmpty()) {
            nextFaceToDraw = null;
        } else {
            nextFaceToDraw = sharer.facesToDraw.get(index);
            index++;
        }
    }
    while (nextFaceToDraw != null) {
        drawFaceAttributes(c, nextFaceToDraw, mirrorPoints, multiFaceMode);
        synchronized (sharer) {
            mirrorPoints = sharer.isPointsMirrored;
            // The list may have been replaced by updatePoints() while we were
            // drawing; re-check the bound before fetching the next face.
            if (index < sharer.facesToDraw.size()) {
                nextFaceToDraw = sharer.facesToDraw.get(index);
                index++;
            } else {
                nextFaceToDraw = null;
            }
        }
    }
}
/**
 * Draws everything associated with one face: tracking dots, the valence-tinted
 * bounding box, appearance and emoji markers beside the box, and (in multi-face
 * mode only) the dominant-emotion label beneath it.
 */
private void drawFaceAttributes(Canvas c, Face face, boolean mirrorPoints, boolean isMultiFaceMode) {
    //Coordinates around which to draw bounding box.
    //Default to an 'inverted' box, where the absolute max and min values of the surface view are inside-out,
    //so the first union() call snaps it onto the first real point.
    Rect boundingRect = new Rect(config.surfaceViewWidth, config.surfaceViewHeight, 0, 0);
    for (PointF point : face.getFacePoints()) {
        //transform from the camera coordinates to our screen coordinates
        //The camera preview is displayed as a mirror, so X pts have to be mirrored back.
        float x;
        if (mirrorPoints) {
            x = (config.imageWidth - point.x) * config.screenToImageRatio;
        } else {
            x = (point.x) * config.screenToImageRatio;
        }
        float y = (point.y) * config.screenToImageRatio;
        //For some reason I needed to add each point twice to make sure that all the
        //points get properly registered in the bounding box.
        boundingRect.union(Math.round(x), Math.round(y));
        boundingRect.union(Math.round(x), Math.round(y));
        //Draw facial tracking dots.
        if (config.isDrawPointsEnabled) {
            c.drawCircle(x, y, config.drawThickness, trackingPointsPaint);
        }
    }
    //Draw the bounding box.
    if (config.isDrawPointsEnabled) {
        drawBoundingBox(c, face, boundingRect);
    }
    // Vertical overflow needed so side markers taller than the box still fit.
    float heightOffset = findNecessaryHeightOffset(boundingRect, face);
    //Draw the Appearance markers (gender / glasses)
    if (config.isDrawAppearanceMarkersEnabled) {
        drawAppearanceMarkers(c, face, boundingRect, heightOffset);
    }
    //Draw the Emoji markers
    if (config.isDrawEmojiMarkersEnabled) {
        drawDominantEmoji(c, face, boundingRect, heightOffset);
    }
    //Only draw the dominant emotion bar in multiface mode
    if (isMultiFaceMode) {
        drawDominantEmotion(c, face, boundingRect);
    }
}
/**
 * Measures how much vertical space the appearance and emoji marker bitmaps need
 * beyond the bounding box's height, and returns half of any overflow so it can
 * be distributed evenly above and below the box.
 */
private float findNecessaryHeightOffset(Rect boundingBox, Face face) {
    Bitmap appearanceMarker = getAppearanceBitmapForFace(face);
    Bitmap emojiMarker = getDominantEmojiBitmapForFace(face);
    float appearanceHeight = 0;
    if (appearanceMarker != null) {
        appearanceHeight = appearanceMarker.getHeight();
    }
    float emojiHeight = 0;
    if (emojiMarker != null) {
        emojiHeight = emojiMarker.getHeight();
    }
    // A gap is only needed when both markers are present.
    float gap = 0;
    if (appearanceHeight > 0 && emojiHeight > 0) {
        gap = MARGIN;
    }
    float required = appearanceHeight + emojiHeight + gap;
    float overflow = Math.max(required - boundingBox.height(), 0);
    return overflow / 2; // distribute the overflow evenly on both sides of the bounding box
}
/**
 * Draws the face's bounding box, tinted by its current valence score.
 */
private void drawBoundingBox(Canvas c, Face f, Rect boundingBox) {
    setValenceOfBoundingBox(f.emotions.getValence());
    // Canvas offers a drawRect(Rect, Paint) overload; equivalent to passing the
    // four edges individually.
    c.drawRect(boundingBox, boundingBoxPaint);
}
/**
 * Renders the gender/glasses marker just to the right of the bounding box,
 * anchored near its bottom edge; draws nothing when no marker bitmap applies.
 */
private void drawAppearanceMarkers(Canvas c, Face f, Rect boundingBox, float offset) {
    Bitmap marker = getAppearanceBitmapForFace(f);
    if (marker == null) {
        return;
    }
    float markerX = boundingBox.right + MARGIN;
    float markerY = boundingBox.bottom - marker.getHeight() + offset;
    drawBitmapIfNotRecycled(c, marker, markerX, markerY);
}
/**
 * Selects the pre-loaded appearance marker bitmap for the face's detected
 * gender and glasses state. Returns null (after logging) for an unrecognized
 * gender value.
 */
private Bitmap getAppearanceBitmapForFace(Face f) {
    boolean glassesOn = Face.GLASSES.YES.equals(f.appearance.getGlasses());
    switch (f.appearance.getGender()) {
        case MALE:
            return glassesOn ? appearanceMarkerBitmap_genderMale_glassesOn
                             : appearanceMarkerBitmap_genderMale_glassesOff;
        case FEMALE:
            return glassesOn ? appearanceMarkerBitmap_genderFemale_glassesOn
                             : appearanceMarkerBitmap_genderFemale_glassesOff;
        case UNKNOWN:
            return glassesOn ? appearanceMarkerBitmap_genderUnknown_glassesOn
                             : appearanceMarkerBitmap_genderUnknown_glassesOff;
        default:
            Log.e(LOG_TAG, "Unknown gender: " + f.appearance.getGender());
            return null;
    }
}
// Draws the bitmap at the given position, skipping it if the bitmap has been
// recycled in the meantime (so we never hand a recycled bitmap to the canvas).
private void drawBitmapIfNotRecycled(Canvas c, Bitmap b, float posX, float posY) {
    if (!b.isRecycled()) {
        c.drawBitmap(b, posX, posY, null);
    }
}
// Draws the face's dominant emoji to the right of the bounding box, raised by
// the computed overflow offset.
// NOTE(review): assumes emojis.getDominantEmoji() never returns null -- TODO
// confirm; the UNKNOWN emoji itself is filtered inside getEmojiBitmapByName().
private void drawDominantEmoji(Canvas c, Face f, Rect boundingBox, float offset) {
    drawEmojiFromCache(c, f.emojis.getDominantEmoji().name(), boundingBox.right + MARGIN, boundingBox.top - offset);
}
/**
 * Draws the name and score of the face's dominant emotion, stacked beneath the
 * bounding box, with a colored bar behind the score whose width scales with the
 * score. Does nothing when no emotion exceeds the dominance threshold.
 */
private void drawDominantEmotion(Canvas c, Face f, Rect boundingBox) {
    Pair<String, Float> dominantMetric = findDominantEmotion(f);
    if (dominantMetric == null || dominantMetric.first.isEmpty()) {
        return;
    }
    String emotionText = dominantMetric.first;
    String emotionValue = Math.round(dominantMetric.second) + "%";
    // Measure both strings so they can be stacked with MARGIN spacing.
    Rect emotionTextBounds = new Rect();
    config.dominantEmotionLabelPaint.getTextBounds(emotionText, 0, emotionText.length(), emotionTextBounds);
    Rect emotionValueBounds = new Rect();
    config.dominantEmotionValuePaint.getTextBounds(emotionValue, 0, emotionValue.length(), emotionValueBounds);
    // Text is drawn at the box's horizontal midpoint.
    // NOTE(review): centering relies on the paints using centered text
    // alignment -- confirm where the paints are configured.
    float drawAtX = boundingBox.exactCenterX();
    float drawAtY = boundingBox.bottom + MARGIN + emotionTextBounds.height();
    c.drawText(emotionText, drawAtX, drawAtY, config.dominantEmotionLabelPaint);
    //draws the colored bar that appears behind our score
    drawAtY += MARGIN + emotionValueBounds.height();
    // metricBarWidth is the bar's full width at a 100% score; dividing by 200
    // yields the per-percent half-width on each side of center.
    int halfWidth = Math.round(config.metricBarWidth / 200.0f * dominantMetric.second);
    c.drawRect(drawAtX - halfWidth, drawAtY - emotionValueBounds.height(), drawAtX + halfWidth, drawAtY, config.dominantEmotionMetricBarPaint);
    //draws the score
    c.drawText(emotionValue, drawAtX, drawAtY, config.dominantEmotionValuePaint);
}
/**
 * Scans the emotion metrics (VALENCE and ENGAGEMENT deliberately excluded) and
 * returns the strongest one as a (name, score) pair, or null when no emotion
 * scores strictly above the 50-point dominance floor.
 */
private Pair<String, Float> findDominantEmotion(Face f) {
    // Candidates, in the same priority order as before: on ties, the earlier
    // metric wins because only a strictly greater score replaces the leader.
    String[] names = {
            MetricsManager.getCapitalizedName(MetricsManager.Emotions.ANGER),
            MetricsManager.getCapitalizedName(MetricsManager.Emotions.CONTEMPT),
            MetricsManager.getCapitalizedName(MetricsManager.Emotions.DISGUST),
            MetricsManager.getCapitalizedName(MetricsManager.Emotions.FEAR),
            MetricsManager.getCapitalizedName(MetricsManager.Emotions.JOY),
            MetricsManager.getCapitalizedName(MetricsManager.Emotions.SADNESS),
            MetricsManager.getCapitalizedName(MetricsManager.Emotions.SURPRISE)
    };
    float[] scores = {
            f.emotions.getAnger(),
            f.emotions.getContempt(),
            f.emotions.getDisgust(),
            f.emotions.getFear(),
            f.emotions.getJoy(),
            f.emotions.getSadness(),
            f.emotions.getSurprise()
    };
    String bestName = "";
    float bestScore = 50.0f; // no emotion is dominant unless strictly above this
    for (int i = 0; i < names.length; i++) {
        if (scores[i] > bestScore) {
            bestName = names[i];
            bestScore = scores[i];
        }
    }
    return bestName.isEmpty() ? null : new Pair<>(bestName, bestScore);
}
/**
 * Looks up the emoji bitmap by name (cache, then disk, then app resources) and
 * draws it at the given position. Logs and skips drawing when no bitmap can be
 * located for the name.
 */
void drawEmojiFromCache(Canvas c, String emojiName, float markerPosX, float markerPosY) {
    Bitmap emojiBitmap;
    try {
        emojiBitmap = getEmojiBitmapByName(emojiName);
    } catch (FileNotFoundException e) {
        Log.e(LOG_TAG, "Error, file not found!", e);
        return;
    }
    // getEmojiBitmapByName() returns null for the UNKNOWN emoji; draw nothing.
    if (emojiBitmap != null) {
        c.drawBitmap(emojiBitmap, markerPosX, markerPosY, null);
    }
}
// Resolves the bitmap for the face's dominant emoji, or null when it cannot be
// found (logged) or the dominant emoji is UNKNOWN.
private Bitmap getDominantEmojiBitmapForFace(Face f) {
    try {
        return getEmojiBitmapByName(f.emojis.getDominantEmoji().name());
    } catch (FileNotFoundException e) {
        Log.e(LOG_TAG, "Dominant emoji bitmap not available", e);
        return null;
    }
}
/**
 * Resolves an emoji bitmap by its EMOJI enum name, consulting (in order) the
 * in-memory cache, the app's internal-storage copy, and finally the bundled
 * drawable resources. Successful loads are cached for subsequent calls.
 *
 * @param emojiName name of a Face.EMOJI value, e.g. "SMILEY"
 * @return the bitmap, or null when the emoji is UNKNOWN (no artwork exists)
 * @throws FileNotFoundException when no source can supply the bitmap
 */
Bitmap getEmojiBitmapByName(String emojiName) throws FileNotFoundException {
    // No bitmap necessary if emoji is unknown
    if (emojiName.equals(Face.EMOJI.UNKNOWN.name())) {
        return null;
    }
    // "LAUGHING SMILEY" -> "laughing_smiley_emoji" / "laughing_smiley_emoji.png"
    String emojiResourceName = emojiName.trim().replace(' ', '_').toLowerCase(Locale.US).concat("_emoji");
    String emojiFileName = emojiResourceName + ".png";
    //Try to get the emoji from the cache
    Bitmap desiredEmojiBitmap = emojiMarkerBitmapToEmojiTypeMap.get(emojiFileName);
    if (desiredEmojiBitmap != null) {
        //emoji bitmap found in the cache
        return desiredEmojiBitmap;
    }
    //Cache miss, try and load the bitmap from disk
    desiredEmojiBitmap = ImageHelper.loadBitmapFromInternalStorage(getContext(), emojiFileName);
    if (desiredEmojiBitmap != null) {
        //emoji bitmap found in the app storage
        //Bitmap loaded, add to cache for subsequent use.
        emojiMarkerBitmapToEmojiTypeMap.put(emojiFileName, desiredEmojiBitmap);
        return desiredEmojiBitmap;
    }
    Log.d(LOG_TAG, "Emoji not found on disk: " + emojiFileName);
    //Still unable to find the file, try to locate the emoji resource
    // NOTE(review): the lookup is handed the full file name including ".png";
    // resource identifiers normally omit the extension -- confirm getDrawable()
    // strips it, otherwise emojiResourceName should be passed here instead.
    final int resourceId = getDrawable(getContext(), emojiFileName);
    if (resourceId == 0) {
        //unrecognised emoji file name
        throw new FileNotFoundException("Resource not found for file named: " + emojiFileName);
    }
    desiredEmojiBitmap = BitmapFactory.decodeResource(getResources(), resourceId);
    if (desiredEmojiBitmap == null) {
        //still unable to load the resource from the file
        throw new FileNotFoundException("Resource id [" + resourceId + "] but could not load bitmap: " + emojiFileName);
    }
    //Bitmap loaded, add to cache for subsequent use.
    emojiMarkerBitmapToEmojiTypeMap.put(emojiFileName, desiredEmojiBitmap);
    return desiredEmojiBitmap;
}
}
/**
 * Holds the drawing thread's configuration: view/image geometry, stroke
 * thickness, per-feature draw toggles, and the paints used for the
 * dominant-emotion readout.
 */
class DrawingViewConfig {
    private int imageWidth = 1;
    private int surfaceViewWidth = 0;
    private int surfaceViewHeight = 0;
    private float screenToImageRatio = 0;
    private int drawThickness = 0;
    private boolean isDrawPointsEnabled = true; //by default, have the drawing thread draw tracking dots
    private boolean isDimensionsNeeded = true;
    private boolean isDrawAppearanceMarkersEnabled = true; //by default, draw the appearance markers
    private boolean isDrawEmojiMarkersEnabled = true; //by default, draw the dominant emoji markers
    private Paint dominantEmotionLabelPaint;
    private Paint dominantEmotionMetricBarPaint;
    private Paint dominantEmotionValuePaint;
    private int metricBarWidth;

    /** Supplies the paints for the dominant emotion's name and score text. */
    public void setDominantEmotionLabelPaints(Paint labelPaint, Paint valuePaint) {
        this.dominantEmotionLabelPaint = labelPaint;
        this.dominantEmotionValuePaint = valuePaint;
    }

    /** Supplies the paint and full-score width of the bar behind the score. */
    public void setDominantEmotionMetricBarConfig(Paint metricBarPaint, int metricBarWidth) {
        this.dominantEmotionMetricBarPaint = metricBarPaint;
        this.metricBarWidth = metricBarWidth;
    }

    /**
     * Records the surface and camera-image dimensions and derives the
     * image-to-screen scale factor (based on width only).
     *
     * @throws IllegalArgumentException if any dimension is not strictly positive
     */
    public void updateViewDimensions(int surfaceViewWidth, int surfaceViewHeight, int imageWidth, int imageHeight) {
        boolean allPositive = surfaceViewWidth > 0 && surfaceViewHeight > 0
                && imageWidth > 0 && imageHeight > 0;
        if (!allPositive) {
            throw new IllegalArgumentException("All dimensions submitted to updateViewDimensions() must be positive");
        }
        this.imageWidth = imageWidth;
        this.surfaceViewWidth = surfaceViewWidth;
        this.surfaceViewHeight = surfaceViewHeight;
        this.screenToImageRatio = (float) surfaceViewWidth / imageWidth;
        this.isDimensionsNeeded = false;
    }

    /**
     * Sets the stroke thickness used for tracking dots and the bounding box.
     *
     * @throws IllegalArgumentException if the thickness is not strictly positive
     */
    public void setDrawThickness(int t) {
        if (t <= 0) {
            throw new IllegalArgumentException("Thickness must be positive.");
        }
        this.drawThickness = t;
    }
}
}

View file

@ -0,0 +1,195 @@
package com.affectiva.affdexme;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.graphics.drawable.Drawable;
import android.support.annotation.NonNull;
import android.util.DisplayMetrics;
import android.util.Log;
import android.widget.ImageView;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
public class ImageHelper {
private static final String LOG_TAG = ImageHelper.class.getSimpleName();
// Prevent instantiation of this object
private ImageHelper() {
}
public static boolean checkIfImageFileExists(@NonNull Context context, @NonNull String fileName) {
// path to /data/data/yourapp/app_data/images
File directory = context.getDir("images", Context.MODE_PRIVATE);
// File location to save image
File imagePath = new File(directory, fileName);
return imagePath.exists();
}
public static boolean deleteImageFile(@NonNull Context context, @NonNull String fileName) {
// path to /data/data/yourapp/app_data/images
File directory = context.getDir("images", Context.MODE_PRIVATE);
// File location to save image
File imagePath = new File(directory, fileName);
return imagePath.delete();
}
public static void resizeAndSaveResourceImageToInternalStorage(@NonNull Context context, @NonNull String fileName, @NonNull String resourceName) throws FileNotFoundException {
final int resourceId = context.getResources().getIdentifier(resourceName, "drawable", context.getPackageName());
if (resourceId == 0) {
//unrecognised resource
throw new FileNotFoundException("Resource not found for file named: " + resourceName);
}
resizeAndSaveResourceImageToInternalStorage(context, fileName, resourceId);
}
public static void resizeAndSaveResourceImageToInternalStorage(@NonNull Context context, @NonNull String fileName, int resourceId) {
Resources resources = context.getResources();
Bitmap sourceBitmap = BitmapFactory.decodeResource(resources, resourceId);
Bitmap resizedBitmap = resizeBitmapForDeviceDensity(context, sourceBitmap);
saveBitmapToInternalStorage(context, resizedBitmap, fileName);
sourceBitmap.recycle();
resizedBitmap.recycle();
}
public static Bitmap resizeBitmapForDeviceDensity(@NonNull Context context, @NonNull Bitmap sourceBitmap) {
DisplayMetrics metrics = context.getResources().getDisplayMetrics();
int targetWidth = Math.round(sourceBitmap.getWidth() * metrics.density);
int targetHeight = Math.round(sourceBitmap.getHeight() * metrics.density);
return Bitmap.createScaledBitmap(sourceBitmap, targetWidth, targetHeight, false);
}
public static void saveBitmapToInternalStorage(@NonNull Context context, @NonNull Bitmap bitmapImage, @NonNull String fileName) {
// path to /data/data/yourapp/app_data/images
File directory = context.getDir("images", Context.MODE_PRIVATE);
// File location to save image
File imagePath = new File(directory, fileName);
FileOutputStream fos = null;
try {
fos = new FileOutputStream(imagePath);
// Use the compress method on the BitMap object to write image to the OutputStream
bitmapImage.compress(Bitmap.CompressFormat.PNG, 100, fos);
fos.flush();
} catch (FileNotFoundException e) {
Log.e(LOG_TAG, "Exception while trying to save file to internal storage: " + imagePath, e);
} catch (IOException e) {
Log.e(LOG_TAG, "Exception while trying to flush the output stream", e);
} finally {
if (fos != null) {
try {
fos.close();
} catch (IOException e) {
Log.e(LOG_TAG, "Exception wile trying to close file output stream.", e);
}
}
}
}
public static Bitmap loadBitmapFromInternalStorage(@NonNull Context applicationContext, @NonNull String fileName) {
// path to /data/data/yourapp/app_data/images
File directory = applicationContext.getDir("images", Context.MODE_PRIVATE);
// File location to save image
File imagePath = new File(directory, fileName);
try {
return BitmapFactory.decodeStream(new FileInputStream(imagePath));
} catch (FileNotFoundException e) {
Log.e(LOG_TAG, "Exception wile trying to load image: " + imagePath, e);
return null;
}
}
public static void preproccessImageIfNecessary(Context context, String fileName, String resourceName) {
// Set this to true to force the app to always load the images for debugging purposes
final boolean DEBUG = false;
if (ImageHelper.checkIfImageFileExists(context, fileName)) {
// Image file already exists, no need to load the file again.
if (DEBUG) {
Log.d(LOG_TAG, "DEBUG: Deleting: " + fileName);
ImageHelper.deleteImageFile(context, fileName);
} else {
return;
}
}
try {
ImageHelper.resizeAndSaveResourceImageToInternalStorage(context, fileName, resourceName);
Log.d(LOG_TAG, "Resized and saved image: " + fileName);
} catch (FileNotFoundException e) {
Log.e(LOG_TAG, "Unable to process image: " + fileName, e);
throw new RuntimeException(e);
}
}
/**
* Returns the bitmap position inside an imageView.
* Source: http://stackoverflow.com/a/26930938
* Author: http://stackoverflow.com/users/1474079/chteuchteu
*
* @param imageView source ImageView
* @return 0: left, 1: top, 2: width, 3: height
*/
public static int[] getBitmapPositionInsideImageView(ImageView imageView) {
int[] ret = new int[4];
if (imageView == null || imageView.getDrawable() == null)
return ret;
// Get image dimensions
// Get image matrix values and place them in an array
float[] f = new float[9];
imageView.getImageMatrix().getValues(f);
// Extract the scale values using the constants (if aspect ratio maintained, scaleX == scaleY)
final float scaleX = f[Matrix.MSCALE_X];
final float scaleY = f[Matrix.MSCALE_Y];
// Get the drawable (could also get the bitmap behind the drawable and getWidth/getHeight)
final Drawable d = imageView.getDrawable();
final int origW = d.getIntrinsicWidth();
final int origH = d.getIntrinsicHeight();
// Calculate the actual dimensions
final int actW = Math.round(origW * scaleX);
final int actH = Math.round(origH * scaleY);
ret[2] = actW;
ret[3] = actH;
// Get image position
// We assume that the image is centered into ImageView
int imgViewW = imageView.getWidth();
int imgViewH = imageView.getHeight();
int top = (imgViewH - actH) / 2;
int left = (imgViewW - actW) / 2;
ret[0] = left;
ret[1] = top;
return ret;
}
}

View file

@ -1,6 +1,7 @@
package com.affectiva.affdexme;
import android.Manifest;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
@ -36,9 +37,11 @@ import com.affectiva.android.affdex.sdk.detector.CameraDetector;
import com.affectiva.android.affdex.sdk.detector.Detector;
import com.affectiva.android.affdex.sdk.detector.Face;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
/*
* AffdexMe is an app that demonstrates the use of the Affectiva Android SDK. It uses the
@ -76,20 +79,16 @@ public class MainActivity extends AppCompatActivity
implements Detector.FaceListener, Detector.ImageListener, CameraDetector.CameraEventListener,
View.OnTouchListener, ActivityCompat.OnRequestPermissionsResultCallback {
public static final int MAX_SUPPORTED_FACES = 4;
public static final int NUM_METRICS_DISPLAYED = 6;
private static final String LOG_TAG = "Affectiva";
//Permission-related constants and variables
private static final int AFFDEXME_PERMISSIONS_REQUEST = 42; //value is arbitrary (between 0 and 255)
private boolean cameraPermissionsAvailable = false;
private boolean storagePermissionsAvailable = false;
//Camera variables
int cameraPreviewWidth = 0;
int cameraPreviewHeight = 0;
CameraDetector.CameraType cameraType;
boolean mirrorPoints = false;
//Affectiva SDK Object
private boolean cameraPermissionsAvailable = false;
private CameraDetector detector = null;
//MetricsManager View UI Objects
private RelativeLayout metricViewLayout;
private LinearLayout leftMetricsLayout;
private LinearLayout rightMetricsLayout;
@ -99,8 +98,6 @@ public class MainActivity extends AppCompatActivity
private TextView fpsPct;
private TextView pleaseWaitTextView;
private ProgressBar progressBar;
//Other UI objects
private ViewGroup activityLayout; //top-most ViewGroup in which all content resides
private RelativeLayout mainLayout; //layout, to be resized, containing all UI elements
private RelativeLayout progressBarLayout; //layout used to show progress circle while camera is starting
private LinearLayout permissionsUnavailableLayout; //layout used to notify the user that not enough permissions have been granted to use the app
@ -108,49 +105,62 @@ public class MainActivity extends AppCompatActivity
private DrawingView drawingView; //SurfaceView containing its own thread, used to draw facial tracking dots
private ImageButton settingsButton;
private ImageButton cameraButton;
private Button retryPermissionsButton;
//Application settings variables
private int detectorProcessRate;
private boolean isMenuVisible = false;
private boolean isFPSVisible = false;
private boolean isMenuShowingForFirstTime = true;
//Frames Per Second (FPS) variables
private long firstSystemTime = 0;
private float numberOfFrames = 0;
private long timeToUpdate = 0;
//Camera-related variables
private boolean isFrontFacingCameraDetected = true;
private boolean isBackFacingCameraDetected = true;
private boolean multiFaceModeEnabled = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); //To maximize UI space, we declare our app to be full-screen
preproccessMetricImages();
setContentView(R.layout.activity_main);
initializeUI();
checkForDangerousPermissions();
determineCameraAvailability();
initializeCameraDetector();
}
/**
* Only load the files onto disk the first time the app opens
*/
/**
 * Only load the files onto disk the first time the app opens: pre-renders every
 * emoji marker and every appearance (gender x glasses) marker into internal
 * storage so the drawing thread can load them cheaply.
 */
private void preproccessMetricImages() {
    Context context = getBaseContext();

    // Emoji markers: one density-scaled PNG per known emoji (UNKNOWN has no artwork).
    for (Face.EMOJI emoji : Face.EMOJI.values()) {
        if (emoji.equals(Face.EMOJI.UNKNOWN)) {
            continue;
        }
        String emojiResourceName = emoji.name().trim().replace(' ', '_').toLowerCase(Locale.US).concat("_emoji");
        String emojiFileName = emojiResourceName + ".png";
        ImageHelper.preproccessImageIfNecessary(context, emojiFileName, emojiResourceName);
    }

    // Appearance markers: every gender x glasses combination, data-driven
    // instead of six copy-pasted calls.
    String[] appearanceMarkerNames = {
            "female_glasses", "female_noglasses",
            "male_glasses", "male_noglasses",
            "unknown_glasses", "unknown_noglasses"
    };
    for (String resourceName : appearanceMarkerNames) {
        ImageHelper.preproccessImageIfNecessary(context, resourceName + ".png", resourceName);
    }
}
private void checkForDangerousPermissions() {
cameraPermissionsAvailable =
ContextCompat.checkSelfPermission(
getApplicationContext(),
Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED;
storagePermissionsAvailable =
ContextCompat.checkSelfPermission(
getBaseContext(),
Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED;
if (!cameraPermissionsAvailable || !storagePermissionsAvailable) {
if (!cameraPermissionsAvailable) {
// Should we show an explanation?
if (ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) ||
ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.CAMERA)) {
if (ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.CAMERA)) {
// Show an explanation to the user *asynchronously* -- don't block
// this thread waiting for the user's response! After the user
@ -169,9 +179,6 @@ public class MainActivity extends AppCompatActivity
if (!cameraPermissionsAvailable) {
neededPermissions.add(Manifest.permission.CAMERA);
}
if (!storagePermissionsAvailable) {
neededPermissions.add(Manifest.permission.WRITE_EXTERNAL_STORAGE);
}
ActivityCompat.requestPermissions(
this,
@ -194,13 +201,10 @@ public class MainActivity extends AppCompatActivity
if (permission.equals(Manifest.permission.CAMERA)) {
cameraPermissionsAvailable = (grantResult == PackageManager.PERMISSION_GRANTED);
}
if (permission.equals(Manifest.permission.WRITE_EXTERNAL_STORAGE)) {
storagePermissionsAvailable = (grantResult == PackageManager.PERMISSION_GRANTED);
}
}
}
if (!cameraPermissionsAvailable || !storagePermissionsAvailable) {
if (!cameraPermissionsAvailable) {
permissionsUnavailableLayout.setVisibility(View.VISIBLE);
} else {
permissionsUnavailableLayout.setVisibility(View.GONE);
@ -250,21 +254,24 @@ public class MainActivity extends AppCompatActivity
notFoundTextView.setVisibility(View.VISIBLE);
}
//TODO: change this to be taken from settings
if (isBackFacingCameraDetected) {
cameraType = CameraDetector.CameraType.CAMERA_BACK;
mirrorPoints = false;
}
if (isFrontFacingCameraDetected) {
//set default camera settings
SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
//restore the camera type settings
String cameraTypeName = sharedPreferences.getString("cameraType", CameraDetector.CameraType.CAMERA_FRONT.name());
if (cameraTypeName.equals(CameraDetector.CameraType.CAMERA_FRONT.name())) {
cameraType = CameraDetector.CameraType.CAMERA_FRONT;
mirrorPoints = true;
} else {
cameraType = CameraDetector.CameraType.CAMERA_BACK;
mirrorPoints = false;
}
}
void initializeUI() {
//Get handles to UI objects
activityLayout = (ViewGroup) findViewById(android.R.id.content);
ViewGroup activityLayout = (ViewGroup) findViewById(android.R.id.content);
progressBarLayout = (RelativeLayout) findViewById(R.id.progress_bar_cover);
permissionsUnavailableLayout = (LinearLayout) findViewById(R.id.permissionsUnavialableLayout);
metricViewLayout = (RelativeLayout) findViewById(R.id.metric_view_group);
@ -279,7 +286,7 @@ public class MainActivity extends AppCompatActivity
cameraButton = (ImageButton) findViewById(R.id.camera_button);
progressBar = (ProgressBar) findViewById(R.id.progress_bar);
pleaseWaitTextView = (TextView) findViewById(R.id.please_wait_textview);
retryPermissionsButton = (Button) findViewById(R.id.retryPermissionsButton);
Button retryPermissionsButton = (Button) findViewById(R.id.retryPermissionsButton);
//Initialize views to display metrics
metricNames = new TextView[NUM_METRICS_DISPLAYED];
@ -336,7 +343,7 @@ public class MainActivity extends AppCompatActivity
activityLayout.setOnSystemUiVisibilityChangeListener(new View.OnSystemUiVisibilityChangeListener() {
@Override
public void onSystemUiVisibilityChange(int uiCode) {
if ((uiCode == 0) && (isMenuVisible == false)) {
if ((uiCode == 0) && (!isMenuVisible)) {
setMenuVisible(true);
}
@ -356,8 +363,7 @@ public class MainActivity extends AppCompatActivity
* the camera. If a SurfaceView is passed in as the last argument to the constructor,
* that view will be painted with what the camera sees.
*/
detector = new CameraDetector(this, CameraDetector.CameraType.CAMERA_FRONT, cameraView);
detector = new CameraDetector(this, cameraType, cameraView, (multiFaceModeEnabled ? MAX_SUPPORTED_FACES : 1), Detector.FaceDetectorMode.LARGE_FACES);
// update the license path here if you name your file something else
detector.setLicensePath("license.txt");
@ -378,15 +384,42 @@ public class MainActivity extends AppCompatActivity
isMenuShowingForFirstTime = true;
}
/**
 * Switches between single-face and multi-face detection. The face limit is
 * fixed when the CameraDetector is constructed, so changing the mode requires
 * stopping the detector and rebuilding it.
 */
private void setMultiFaceModeEnabled(boolean isEnabled) {
    // Nothing to do when the requested mode is already active.
    if (multiFaceModeEnabled == isEnabled) {
        return;
    }
    multiFaceModeEnabled = isEnabled;
    stopDetector();
    initializeCameraDetector();
}
/*
* We use the Shared Preferences object to restore application settings.
*/
public void restoreApplicationSettings() {
SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
//restore the camera type settings
String cameraTypeName = sharedPreferences.getString("cameraType", CameraDetector.CameraType.CAMERA_FRONT.name());
if (cameraTypeName.equals(CameraDetector.CameraType.CAMERA_FRONT.name())) {
setCameraType(CameraDetector.CameraType.CAMERA_FRONT);
} else {
setCameraType(CameraDetector.CameraType.CAMERA_BACK);
}
//restore the multiface mode setting to reset the detector if necessary
if (sharedPreferences.getBoolean("multiface", false)) { // default to false
setMultiFaceModeEnabled(true);
} else {
setMultiFaceModeEnabled(false);
}
//restore camera processing rate
detectorProcessRate = PreferencesUtils.getFrameProcessingRate(sharedPreferences);
int detectorProcessRate = PreferencesUtils.getFrameProcessingRate(sharedPreferences);
detector.setMaxProcessRate(detectorProcessRate);
drawingView.invalidateDimensions();
if (sharedPreferences.getBoolean("fps", isFPSVisible)) { //restore isFPSMetricVisible
setFPSVisible(true);
@ -400,16 +433,31 @@ public class MainActivity extends AppCompatActivity
setTrackPoints(false);
}
if (sharedPreferences.getBoolean("measurements", drawingView.getDrawMeasurementsEnabled())) { //restore show measurements
setShowMeasurements(true);
if (sharedPreferences.getBoolean("appearance", drawingView.getDrawAppearanceMarkersEnabled())) {
detector.setDetectAllAppearance(true);
setShowAppearance(true);
} else {
setShowMeasurements(false);
detector.setDetectAllAppearance(false);
setShowAppearance(false);
}
if (sharedPreferences.getBoolean("emoji", drawingView.getDrawEmojiMarkersEnabled())) {
detector.setDetectAllEmojis(true);
setShowEmoji(true);
} else {
detector.setDetectAllEmojis(false);
setShowEmoji(false);
}
//populate metric displays
for (int n = 0; n < NUM_METRICS_DISPLAYED; n++) {
activateMetric(n, PreferencesUtils.getMetricFromPrefs(sharedPreferences, n));
}
//if we are in multiface mode, we need to enable the detection of all emotions
if (multiFaceModeEnabled) {
detector.setDetectAllEmotions(true);
}
}
/**
@ -419,27 +467,43 @@ public class MainActivity extends AppCompatActivity
* -save the Method object that will be invoked on the Face object received in onImageResults() to get the metric score
*/
void activateMetric(int index, MetricsManager.Metrics metric) {
metricNames[index].setText(MetricsManager.getUpperCaseName(metric));
Method getFaceScoreMethod = null; //The method that will be used to get a metric score
try {
//Enable metric detection
Detector.class.getMethod("setDetect" + MetricsManager.getCamelCase(metric), boolean.class).invoke(detector, true);
switch (metric.getType()) {
case Emotion:
Detector.class.getMethod("setDetect" + MetricsManager.getCamelCase(metric), boolean.class).invoke(detector, true);
metricNames[index].setText(MetricsManager.getUpperCaseName(metric));
getFaceScoreMethod = Face.Emotions.class.getMethod("get" + MetricsManager.getCamelCase(metric));
if (metric.getType() == MetricsManager.MetricType.Emotion) {
getFaceScoreMethod = Face.Emotions.class.getMethod("get" + MetricsManager.getCamelCase(metric), null);
//The MetricDisplay for Valence is unique; it shades it color depending on the metric value
if (metric == MetricsManager.Emotions.VALENCE) {
metricDisplays[index].setIsShadedMetricView(true);
} else {
metricDisplays[index].setIsShadedMetricView(false);
}
} else if (metric.getType() == MetricsManager.MetricType.Expression) {
getFaceScoreMethod = Face.Expressions.class.getMethod("get" + MetricsManager.getCamelCase(metric), null);
//The MetricDisplay for Valence is unique; it shades it color depending on the metric value
if (metric == MetricsManager.Emotions.VALENCE) {
metricDisplays[index].setIsShadedMetricView(true);
} else {
metricDisplays[index].setIsShadedMetricView(false);
}
break;
case Expression:
Detector.class.getMethod("setDetect" + MetricsManager.getCamelCase(metric), boolean.class).invoke(detector, true);
metricNames[index].setText(MetricsManager.getUpperCaseName(metric));
getFaceScoreMethod = Face.Expressions.class.getMethod("get" + MetricsManager.getCamelCase(metric));
break;
case Emoji:
detector.setDetectAllEmojis(true);
MetricsManager.Emojis emoji = ((MetricsManager.Emojis) metric);
String metricTitle = emoji.getDisplayName(); // + " " + emoji.getUnicodeForEmoji();
metricNames[index].setText(metricTitle);
Log.d(LOG_TAG, "Getter Method: " + "get" + MetricsManager.getCamelCase(metric));
getFaceScoreMethod = Face.Emojis.class.getMethod("get" + MetricsManager.getCamelCase(metric));
break;
}
} catch (Exception e) {
Log.e(LOG_TAG, String.format("Error using reflection to generate methods for %s", metric.toString()));
} catch (NoSuchMethodException e) {
Log.e(LOG_TAG, String.format("No such method while using reflection to generate methods for %s", metric.toString()), e);
} catch (InvocationTargetException e) {
Log.e(LOG_TAG, String.format("Invocation error while using reflection to generate methods for %s", metric.toString()), e);
} catch (IllegalAccessException e) {
Log.e(LOG_TAG, String.format("Illegal access error while using reflection to generate methods for %s", metric.toString()), e);
}
metricDisplays[index].setMetricToDisplay(metric, getFaceScoreMethod);
@ -474,7 +538,7 @@ public class MainActivity extends AppCompatActivity
void mainWindowResumedTasks() {
//Notify the user that they can't use the app without authorizing these permissions.
if (!cameraPermissionsAvailable || !storagePermissionsAvailable) {
if (!cameraPermissionsAvailable) {
permissionsUnavailableLayout.setVisibility(View.VISIBLE);
return;
}
@ -543,32 +607,34 @@ public class MainActivity extends AppCompatActivity
performFPSCalculations();
//If faces.size() is 0, we received a frame in which no face was detected
if (faces.size() == 0) {
drawingView.updatePoints(null, mirrorPoints); //the drawingView takes null points to mean it doesn't have to draw anything
return;
}
if (faces.size() <= 0) {
drawingView.invalidatePoints();
} else if (faces.size() == 1) {
metricViewLayout.setVisibility(View.VISIBLE);
//The SDK currently detects one face at a time, so we recover it using .get(0).
//'0' indicates we are recovering the first face.
Face face = faces.get(0);
//update metrics with latest face information. The metrics are displayed on a MetricView, a custom view with a .setScore() method.
for (MetricDisplay metricDisplay : metricDisplays) {
updateMetricScore(metricDisplay, faces.get(0));
}
//update metrics with latest face information. The metrics are displayed on a MetricView, a custom view with a .setScore() method.
for (MetricDisplay metricDisplay : metricDisplays) {
updateMetricScore(metricDisplay, face);
}
/**
* If the user has selected to have any facial attributes drawn, we use face.getFacePoints() to send those points
* to our drawing thread and also inform the thread what the valence score was, as that will determine the color
* of the bounding box.
*/
if (drawingView.getDrawPointsEnabled() || drawingView.getDrawAppearanceMarkersEnabled() || drawingView.getDrawEmojiMarkersEnabled()) {
drawingView.updatePoints(faces, mirrorPoints);
}
/**
* If the user has selected to have facial tracking dots or measurements drawn, we use face.getFacePoints() to send those points
* to our drawing thread and also inform the thread what the valence score was, as that will determine the color
* of the bounding box.
*/
if (drawingView.getDrawPointsEnabled() || drawingView.getDrawMeasurementsEnabled()) {
drawingView.setMetrics(face.measurements.orientation.getRoll(), face.measurements.orientation.getYaw(), face.measurements.orientation.getPitch(), face.measurements.getInterocularDistance(), face.emotions.getValence());
drawingView.updatePoints(face.getFacePoints(), mirrorPoints);
} else {
// metrics overlay is hidden in multi face mode
metricViewLayout.setVisibility(View.GONE);
// always update points in multi face mode
drawingView.updatePoints(faces, mirrorPoints);
}
}
/**
* Use the method that we saved in activateMetric() to get the metric score and display it
*/
@ -578,11 +644,18 @@ public class MainActivity extends AppCompatActivity
float score = Float.NaN;
try {
if (metric.getType() == MetricsManager.MetricType.Emotion) {
score = (Float) metricDisplay.getFaceScoreMethod().invoke(face.emotions, null);
metricDisplay.setScore(score);
} else if (metric.getType() == MetricsManager.MetricType.Expression) {
score = (Float) metricDisplay.getFaceScoreMethod().invoke(face.expressions, null);
switch (metric.getType()) {
case Emotion:
score = (Float) metricDisplay.getFaceScoreMethod().invoke(face.emotions);
break;
case Expression:
score = (Float) metricDisplay.getFaceScoreMethod().invoke(face.expressions);
break;
case Emoji:
score = (Float) metricDisplay.getFaceScoreMethod().invoke(face.emojis);
break;
default:
throw new Exception("Unknown Metric Type: " + metric.getType().toString());
}
} catch (Exception e) {
Log.e(LOG_TAG, String.format("Error using reflecting to get %s score from face.", metric.toString()));
@ -633,6 +706,8 @@ public class MainActivity extends AppCompatActivity
detector.setDetectAllEmotions(false);
detector.setDetectAllExpressions(false);
detector.setDetectAllAppearance(false);
detector.setDetectAllEmojis(false);
}
@ -659,10 +734,7 @@ public class MainActivity extends AppCompatActivity
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_FULLSCREEN
| View.SYSTEM_UI_FLAG_IMMERSIVE);
| View.SYSTEM_UI_FLAG_FULLSCREEN);
settingsButton.setVisibility(View.INVISIBLE);
cameraButton.setVisibility(View.INVISIBLE);
}
@ -686,10 +758,15 @@ public class MainActivity extends AppCompatActivity
drawingView.setDrawPointsEnabled(b);
}
void setShowMeasurements(boolean b) {
drawingView.setDrawMeasurementsEnabled(b);
void setShowAppearance(boolean b) {
drawingView.setDrawAppearanceMarkersEnabled(b);
}
void setShowEmoji(boolean b) {
drawingView.setDrawEmojiMarkersEnabled(b);
}
void setFPSVisible(boolean b) {
isFPSVisible = b;
if (b) {
@ -713,14 +790,7 @@ public class MainActivity extends AppCompatActivity
startActivity(new Intent(this, SettingsActivity.class));
}
/* onCameraStarted is a feature of SDK 2.02, commenting out for 2.01
@Override
public void onCameraStarted(boolean b, Throwable throwable) {
if (throwable != null) {
Toast.makeText(this,"Failed to start camera.",Toast.LENGTH_LONG).show();
}
}*/
@SuppressWarnings("SuspiciousNameCombination")
@Override
public void onCameraSizeSelected(int cameraWidth, int cameraHeight, ROTATE rotation) {
if (rotation == ROTATE.BY_90_CCW || rotation == ROTATE.BY_90_CW) {
@ -774,28 +844,43 @@ public class MainActivity extends AppCompatActivity
public void camera_button_click(View view) {
if (cameraType == CameraDetector.CameraType.CAMERA_FRONT) {
if (isBackFacingCameraDetected) {
cameraType = CameraDetector.CameraType.CAMERA_BACK;
mirrorPoints = false;
} else {
Toast.makeText(this, "No back-facing camera found", Toast.LENGTH_LONG).show();
}
} else if (cameraType == CameraDetector.CameraType.CAMERA_BACK) {
if (isFrontFacingCameraDetected) {
cameraType = CameraDetector.CameraType.CAMERA_FRONT;
mirrorPoints = true;
} else {
Toast.makeText(this, "No front-facing camera found", Toast.LENGTH_LONG).show();
}
}
//Toggle the camera setting
setCameraType(cameraType == CameraDetector.CameraType.CAMERA_FRONT ? CameraDetector.CameraType.CAMERA_BACK : CameraDetector.CameraType.CAMERA_FRONT);
}
performFaceDetectionStoppedTasks();
private void setCameraType(CameraDetector.CameraType type) {
SharedPreferences.Editor preferencesEditor = PreferenceManager.getDefaultSharedPreferences(this).edit();
//If a settings change is necessary
if (cameraType != type) {
switch (type) {
case CAMERA_BACK:
if (isBackFacingCameraDetected) {
cameraType = CameraDetector.CameraType.CAMERA_BACK;
mirrorPoints = false;
} else {
Toast.makeText(this, "No back-facing camera found", Toast.LENGTH_LONG).show();
return;
}
break;
case CAMERA_FRONT:
if (isFrontFacingCameraDetected) {
cameraType = CameraDetector.CameraType.CAMERA_FRONT;
mirrorPoints = true;
} else {
Toast.makeText(this, "No front-facing camera found", Toast.LENGTH_LONG).show();
return;
}
break;
default:
Log.e(LOG_TAG, "Unknown camera type selected");
}
performFaceDetectionStoppedTasks();
try {
detector.setCameraType(cameraType);
} catch (Exception e) {
Log.e(LOG_TAG, e.getMessage());
preferencesEditor.putString("cameraType", cameraType.name());
preferencesEditor.apply();
}
}
}

View file

@ -34,15 +34,17 @@ public class MetricDisplay extends View {
public MetricDisplay(Context context) {
super(context);
initResources(context,null);
initResources(context, null);
}
public MetricDisplay(Context context, AttributeSet attrs) {
super(context,attrs);
initResources(context,attrs);
super(context, attrs);
initResources(context, attrs);
}
public MetricDisplay(Context context, AttributeSet attrs, int styleID){
public MetricDisplay(Context context, AttributeSet attrs, int styleID) {
super(context, attrs, styleID);
initResources(context,attrs);
initResources(context, attrs);
}
void setIsShadedMetricView(boolean b) {
@ -65,11 +67,11 @@ public class MetricDisplay extends View {
//load and parse XML attributes
if (attrs != null) {
TypedArray a = getContext().obtainStyledAttributes(attrs,R.styleable.custom_attributes,0,0);
TypedArray a = getContext().obtainStyledAttributes(attrs, R.styleable.custom_attributes, 0, 0);
textPaint.setColor(a.getColor(R.styleable.custom_attributes_textColor, Color.BLACK));
textSize = a.getDimensionPixelSize(R.styleable.custom_attributes_textSize, textSize);
textPaint.setTextSize(textSize);
halfWidth = a.getDimensionPixelSize(R.styleable.custom_attributes_metricBarLength,100)/2;
halfWidth = a.getDimensionPixelSize(R.styleable.custom_attributes_metricBarLength, 100) / 2;
a.recycle();
} else {
textPaint.setColor(Color.BLACK);
@ -83,7 +85,6 @@ public class MetricDisplay extends View {
*/
height = textSize;
textBottom = height - 5;
}
public void setMetricToDisplay(MetricsManager.Metrics metricToDisplay, Method faceScoreMethod) {
@ -103,7 +104,7 @@ public class MetricDisplay extends View {
textPaint.setTypeface(face);
}
public void setScore(float s){
public void setScore(float s) {
text = String.format("%.0f%%", s); //change the text of the view
//shading mode is turned on for Valence, which causes this view to shade its color according
@ -117,11 +118,11 @@ public class MetricDisplay extends View {
right = midX + (halfWidth * (-s / 100));
}
if (s > 0) {
float colorScore = ((100f-s)/100f)*255;
boxPaint.setColor(Color.rgb((int)colorScore,255,(int)colorScore));
float colorScore = ((100f - s) / 100f) * 255;
boxPaint.setColor(Color.rgb((int) colorScore, 255, (int) colorScore));
} else {
float colorScore = ((100f+s)/100f)*255;
boxPaint.setColor(Color.rgb(255,(int)colorScore,(int)colorScore));
float colorScore = ((100f + s) / 100f) * 255;
boxPaint.setColor(Color.rgb(255, (int) colorScore, (int) colorScore));
}
} else {
left = midX - (halfWidth * (s / 100)); //change the coordinates at which the colored bar will be drawn
@ -133,29 +134,27 @@ public class MetricDisplay extends View {
/**
* set our view to be the minimum of the sizes that Android will allow and our desired sizes
* **/
**/
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
setMeasuredDimension((int)Math.min(MeasureSpec.getSize(widthMeasureSpec), halfWidth *2), (int)Math.min(MeasureSpec.getSize(heightMeasureSpec),height));
setMeasuredDimension((int) Math.min(MeasureSpec.getSize(widthMeasureSpec), halfWidth * 2), (int) Math.min(MeasureSpec.getSize(heightMeasureSpec), height));
}
@Override
protected void onSizeChanged(int w, int h, int oldW, int oldH) {
super.onSizeChanged(w,h,oldW,oldH);
midX = w/2;
midY = h/2;
super.onSizeChanged(w, h, oldW, oldH);
midX = w / 2;
midY = h / 2;
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
//draws the colored bar that appears behind our score
canvas.drawRect(left,top,right,height, boxPaint);
canvas.drawRect(left, top, right, height, boxPaint);
//draws the score
canvas.drawText(text,midX , textBottom, textPaint);
canvas.drawText(text, midX, textBottom, textPaint);
}
}

View file

@ -10,6 +10,7 @@ import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Surface;
@ -24,9 +25,9 @@ import java.util.ArrayList;
import java.util.HashMap;
import static com.affectiva.affdexme.MainActivity.NUM_METRICS_DISPLAYED;
/**
* A fragment to display a graphical menu which allows the user to select which metrics to display.
*
*/
public class MetricSelectionFragment extends Fragment implements View.OnClickListener {
@ -84,13 +85,11 @@ public class MetricSelectionFragment extends Fragment implements View.OnClickLis
}
);
Resources res = getResources();
messageAtOrUnderLimitColor = res.getColor(R.color.white);
messageOverLimitColor = res.getColor(R.color.red);
messageAtOrUnderLimitColor = ContextCompat.getColor(getActivity(), R.color.white);
messageOverLimitColor = ContextCompat.getColor(getActivity(), R.color.red);
}
/**
* A method to populate the metricSelectors array using information from either a saved instance bundle (if the activity is being re-created)
* or sharedPreferences (if the activity is being created for the first time)
@ -110,15 +109,15 @@ public class MetricSelectionFragment extends Fragment implements View.OnClickLis
if (bundle != null) { //if we were passed a bundle, use its data to configure the MetricSelectors
for (MetricsManager.Metrics metric : MetricsManager.getAllMetrics()) {
if (bundle.getBoolean(metric.toString(),false)) {
selectItem(metricSelectors.get(metric),true,false);
if (bundle.getBoolean(metric.toString(), false)) {
selectItem(metricSelectors.get(metric), true, false);
}
}
} else { //otherwise, we pull the data from application preferences
for (int i = 0; i < NUM_METRICS_DISPLAYED; i++) {
MetricsManager.Metrics chosenMetric = PreferencesUtils.getMetricFromPrefs(sharedPreferences, i);
selectItem(metricSelectors.get(chosenMetric),true,false);
selectItem(metricSelectors.get(chosenMetric), true, false);
}
}
}
@ -207,7 +206,7 @@ public class MetricSelectionFragment extends Fragment implements View.OnClickLis
Log.e(LOG_TAG, "Desired Column Width too large! Unable to populate Grid");
return;
}
int columnWidth = (int)((float) gridWidth / (float)numColumns);
int columnWidth = (int) ((float) gridWidth / (float) numColumns);
//This integer reference will be used across methods to keep track of how many rows we have created.
//Each method we pass it into leaves it at a value indicating the next row number that views should be added to.
@ -218,6 +217,10 @@ public class MetricSelectionFragment extends Fragment implements View.OnClickLis
addHeader("Expressions", currentRow, numColumns, inflater);
addGridItems(currentRow, numColumns, inflater, res, columnWidth, MetricsManager.Expressions.values());
// If you wanted to add Emoji as selectable metrics, you would uncomment the two lines below
// addHeader("Emoji", currentRow, numColumns, inflater);
// addGridItems(currentRow, numColumns, inflater, res, columnWidth, MetricsManager.Emojis.values());
gridLayout.setColumnCount(numColumns);
gridLayout.setRowCount(currentRow.value);
}
@ -253,18 +256,21 @@ public class MetricSelectionFragment extends Fragment implements View.OnClickLis
}
MetricSelector item = metricSelectors.get(metric);
if (item != null) {
GridLayout.LayoutParams params = new GridLayout.LayoutParams();
params.width = size;
params.height = size;
params.columnSpec = GridLayout.spec(col);
params.rowSpec = GridLayout.spec(currentRow.value);
item.setLayoutParams(params);
GridLayout.LayoutParams params = new GridLayout.LayoutParams();
params.width = size;
params.height = size;
params.columnSpec = GridLayout.spec(col);
params.rowSpec = GridLayout.spec(currentRow.value);
item.setLayoutParams(params);
item.setOnClickListener(this);
gridLayout.addView(item);
item.setOnClickListener(this);
gridLayout.addView(item);
} else {
Log.e(this.getClass().getSimpleName(), "Unknown MetricSelector item for Metric: " + metric.toString());
}
}
currentRow.value +=1; //point currentRow to row where next views should be added
currentRow.value += 1; //point currentRow to row where next views should be added
}
@Override
@ -294,18 +300,10 @@ public class MetricSelectionFragment extends Fragment implements View.OnClickLis
}
metricSelector.setIsSelected(isSelected);
//Create and display message at the top
/*String dMetricsChosen;
if (numberOfSelectedItems == 1) {
dMetricsChosen = "1 metric chosen.";
} else {
dMetricsChosen = String.format("%d metrics chosen.",numberOfSelectedItems);
}*/
if (numberOfSelectedItems == 1) {
metricChooserTextView.setText("1 metric chosen.");
} else {
metricChooserTextView.setText(String.format("%d metrics chosen.",numberOfSelectedItems));
metricChooserTextView.setText(String.format("%d metrics chosen.", numberOfSelectedItems));
}
if (numberOfSelectedItems <= NUM_METRICS_DISPLAYED) {
@ -313,25 +311,11 @@ public class MetricSelectionFragment extends Fragment implements View.OnClickLis
} else {
metricChooserTextView.setTextColor(messageOverLimitColor);
}
/*if (numberOfSelectedItems < NUM_METRICS_DISPLAYED) {
metricChooserTextView.setTextColor(messageAtOrUnderLimitColor);
metricChooserTextView.setText(String.format("%s Choose %d more.", dMetricsChosen, NUM_METRICS_DISPLAYED - numberOfSelectedItems));
} else if (numberOfSelectedItems == NUM_METRICS_DISPLAYED) {
metricChooserTextView.setTextColor(messageAtOrUnderLimitColor);
metricChooserTextView.setText(dMetricsChosen);
} else {
metricChooserTextView.setTextColor(messageOverLimitColor);
metricChooserTextView.setText(String.format("%s Please de-select %d.", dMetricsChosen, numberOfSelectedItems - NUM_METRICS_DISPLAYED));
}*/
}
void clearItems() {
for (MetricsManager.Metrics metric : MetricsManager.getAllMetrics()) {
selectItem(metricSelectors.get(metric),false,true);
selectItem(metricSelectors.get(metric), false, true);
}
updateAllGridItems();
}
@ -349,28 +333,39 @@ public class MetricSelectionFragment extends Fragment implements View.OnClickLis
fragmentMediaPlayer.destroy();
}
/**
* These are not all the MediaPlayer states defined by Android, but they are all the ones we are interested in.
* Note that SafeMediaPlayer never stays in the STOPPED state, so we don't include it.
*/
enum MediaPlayerState {
IDLE, INIT, PREPARED, PLAYING
}
interface OnSafeMediaPlayerPreparedListener {
void onSafeMediaPlayerPrepared();
}
//IntRef represents a reference to a mutable integer value
//It is used to keep track of how many rows have been created in the populateGrid() method
class IntRef {
public int value;
public IntRef() {
value = 0;
}
}
/**
* The MetricSelector objects in this fragment will play a video when selected. To keep memory usage low, we use only one MediaPlayer
* object to control video playback. Video is rendered on a single TextureView.
* Chain of events that lead to video playback:
* -When a MetricSelector is clicked, the MediaPlayer.setDataSource() is called to set the video file
* -The TextureView is added to the view hierarchy of the MetricSelector, causing the onSurfaceTextureAvailable callback to fire
* -The TextureView is bound to the MediaPlayer through MediaPlayer.setSurface(), then MediaPlayer.prepareAsync() is called
* -Once preparation is complete, MediaPlayer.start() is called
* -MediaPlayer.stop() will be called when playback finishes or the item has been de-selected, at which point the TextureView will
* be removed from the MetricSelector's view hierarchy, causing onSurfaceTextureDestroyed(), where we call MediaPlayer.setSurface(null)
* -When a MetricSelector is clicked, the MediaPlayer.setDataSource() is called to set the video file
* -The TextureView is added to the view hierarchy of the MetricSelector, causing the onSurfaceTextureAvailable callback to fire
* -The TextureView is bound to the MediaPlayer through MediaPlayer.setSurface(), then MediaPlayer.prepareAsync() is called
* -Once preparation is complete, MediaPlayer.start() is called
* -MediaPlayer.stop() will be called when playback finishes or the item has been de-selected, at which point the TextureView will
* be removed from the MetricSelector's view hierarchy, causing onSurfaceTextureDestroyed(), where we call MediaPlayer.setSurface(null)
*/
class MetricSelectionFragmentMediaPlayer {
SafeMediaPlayer safePlayer;
@ -429,7 +424,7 @@ public class MetricSelectionFragment extends Fragment implements View.OnClickLis
textureView = new TextureView(getActivity());
textureView.setVisibility(View.GONE);
textureView.setLayoutParams(new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
textureView.setLayoutParams(new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));
textureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
@ -457,8 +452,11 @@ public class MetricSelectionFragment extends Fragment implements View.OnClickLis
private void startVideoPlayback(MetricSelector metricSelector) {
videoPlayingSelector = metricSelector;
videoPlayingSelector.initIndex();
safePlayer.setDataSource(metricSelector.getNextVideoResourceURI());
metricSelector.displayVideo(textureView); //will cause onSurfaceTextureAvailable to fire
Uri videoUri = metricSelector.getNextVideoResourceURI();
if (videoUri != null) {
safePlayer.setDataSource(videoUri);
metricSelector.displayVideo(textureView); //will cause onSurfaceTextureAvailable to fire
}
}
private void endVideoPlayback() {
@ -475,9 +473,9 @@ public class MetricSelectionFragment extends Fragment implements View.OnClickLis
}
void stopMetricSelectorPlayback(MetricSelector metricSelector) {
if (metricSelector == videoPlayingSelector) { //if de-selected item is a playing video, stop it
endVideoPlayback();
}
if (metricSelector == videoPlayingSelector) { //if de-selected item is a playing video, stop it
endVideoPlayback();
}
}
public void destroy() {
@ -491,18 +489,6 @@ public class MetricSelectionFragment extends Fragment implements View.OnClickLis
}
/**
* These are not all the MediaPlayer states defined by Android, but they are all the ones we are interested in.
* Note that SafeMediaPlayer never stays in the STOPPED state, so we don't include it.
*/
enum MediaPlayerState {
IDLE, INIT, PREPARED, PLAYING
};
interface OnSafeMediaPlayerPreparedListener {
void onSafeMediaPlayerPrepared();
}
/**
* A Facade to ensure our MediaPlayer does not throw an error due to an invalid state change.
*/
@ -595,5 +581,3 @@ public class MetricSelectionFragment extends Fragment implements View.OnClickLis
}
}

View file

@ -1,12 +1,16 @@
package com.affectiva.affdexme;
import android.app.Activity;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Color;
import android.net.Uri;
import android.support.v4.content.ContextCompat;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.RelativeLayout;
@ -19,25 +23,35 @@ import android.widget.TextView;
*/
public class MetricSelector extends FrameLayout {
private boolean isMetricSelected;
private MetricsManager.Metrics metric;
TextureView textureView;
TextView gridItemTextView;
ImageView imageView;
ImageView imageViewBeneath;
FrameLayout videoHolder;
RelativeLayout backgroundLayout;
int itemNotSelectedColor;
int itemSelectedColor;
int itemSelectedOverLimitColor;
Uri[] videoResourceURIs;
int videoResourceURIIndex;
TextView videoOverlay;
int picId;
private boolean isMetricSelected;
private boolean isEmoji;
private MetricsManager.Metrics metric;
// These three constructors only provided to allow the UI Editor to properly render this element
public MetricSelector(Context context) {
super(context);
}
public MetricSelector(Context context, AttributeSet attrs) {
super(context, attrs);
}
public MetricSelector(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
public MetricSelector(Activity hostActivity, LayoutInflater inflater, Resources res, String packageName, MetricsManager.Metrics metric) {
super(hostActivity);
@ -45,7 +59,11 @@ public class MetricSelector extends FrameLayout {
this.metric = metric;
this.isMetricSelected = false;
initContent(inflater, res, packageName);
if (metric.getType().equals(MetricsManager.MetricType.Emoji)) {
this.isEmoji = true;
}
initContent(inflater, res, packageName);
}
void initContent(LayoutInflater inflater, Resources res, String packageName) {
@ -55,11 +73,13 @@ public class MetricSelector extends FrameLayout {
videoOverlay = (TextView) content.findViewById(R.id.video_overlay);
int videoId = res.getIdentifier(resourceName,"raw",packageName);
if (metric == MetricsManager.Emotions.VALENCE) {
int videoId = res.getIdentifier(resourceName, "raw", packageName);
if (isEmoji) {
videoResourceURIs = null;
} else if (metric == MetricsManager.Emotions.VALENCE) {
videoResourceURIs = new Uri[2];
videoResourceURIs[0] = Uri.parse(String.format("android.resource://%s/%d", packageName, videoId ));
videoResourceURIs[1] = Uri.parse(String.format("android.resource://%s/%d", packageName, res.getIdentifier(resourceName+"0","raw",packageName)));
videoResourceURIs[0] = Uri.parse(String.format("android.resource://%s/%d", packageName, videoId));
videoResourceURIs[1] = Uri.parse(String.format("android.resource://%s/%d", packageName, res.getIdentifier(resourceName + "0", "raw", packageName)));
} else {
videoResourceURIs = new Uri[1];
videoResourceURIs[0] = Uri.parse(String.format("android.resource://%s/%d", packageName, videoId));
@ -68,6 +88,9 @@ public class MetricSelector extends FrameLayout {
videoResourceURIIndex = 0;
//set up image
if (isEmoji) {
resourceName += "_emoji";
}
picId = res.getIdentifier(resourceName, "drawable", packageName);
imageView = (ImageView) content.findViewById(R.id.grid_item_image_view);
imageViewBeneath = (ImageView) content.findViewById(R.id.grid_item_image_view_beneath);
@ -75,14 +98,15 @@ public class MetricSelector extends FrameLayout {
imageViewBeneath.setImageResource(picId);
imageViewBeneath.setVisibility(GONE);
videoHolder = (FrameLayout) content.findViewById(R.id.video_holder);
backgroundLayout = (RelativeLayout) content.findViewById(R.id.grid_item_background);
gridItemTextView = (TextView) content.findViewById(R.id.grid_item_text);
gridItemTextView.setText(MetricsManager.getCapitalizedName(metric));
itemSelectedOverLimitColor = res.getColor(R.color.grid_item_chosen_over_limit);
itemNotSelectedColor = res.getColor(R.color.grid_item_not_chosen);
itemSelectedColor = res.getColor(R.color.grid_item_chosen);
itemSelectedOverLimitColor = ContextCompat.getColor(getContext(), R.color.grid_item_chosen_over_limit);
itemNotSelectedColor = ContextCompat.getColor(getContext(), R.color.grid_item_not_chosen);
itemSelectedColor = ContextCompat.getColor(getContext(), R.color.grid_item_chosen);
}
boolean getIsSelected() {
@ -105,7 +129,15 @@ public class MetricSelector extends FrameLayout {
void displayVideo(TextureView videoView) {
textureView = videoView;
backgroundLayout.addView(textureView, 1);
ViewGroup.LayoutParams params = new ViewGroup.LayoutParams(textureView.getLayoutParams());
// set the video to the same height and width of the actual bitmap inside the imageview
int[] imageAttr = ImageHelper.getBitmapPositionInsideImageView(imageView);
params.width = imageAttr[2]; //width
params.height = imageAttr[3]; //height
textureView.setLayoutParams(params);
videoHolder.addView(textureView);
textureView.setVisibility(VISIBLE);
videoOverlay.setVisibility(VISIBLE);
}
@ -113,7 +145,7 @@ public class MetricSelector extends FrameLayout {
void removeVideo() {
if (textureView != null) {
textureView.setVisibility(GONE);
backgroundLayout.removeView(textureView);
videoHolder.removeView(textureView);
textureView = null;
}
videoOverlay.setVisibility(GONE);
@ -128,12 +160,15 @@ public class MetricSelector extends FrameLayout {
}
Uri getNextVideoResourceURI() {
if (isEmoji) {
return null;
}
if (metric == MetricsManager.Emotions.VALENCE) {
if (videoResourceURIIndex == 0) {
videoOverlay.setText("NEGATIVE");
videoOverlay.setText(R.string.negative);
videoOverlay.setTextColor(Color.RED);
} else {
videoOverlay.setText("POSITIVE");
videoOverlay.setText(R.string.positive);
videoOverlay.setTextColor(Color.GREEN);
}
}
@ -163,7 +198,4 @@ public class MetricSelector extends FrameLayout {
backgroundLayout.setBackgroundColor(itemNotSelectedColor);
}
}
}

View file

@ -1,10 +1,14 @@
package com.affectiva.affdexme;
import com.affectiva.android.affdex.sdk.detector.Face;
import java.util.Locale;
/**
* A class containing:
* -enumerations representing the Emotion and Expressions featured in the Affectiva SDK.
* -a Metric interface to allow easy iteration through all Expressions and Emotions
* -utility methods for converting a Metric into several types of strings
* -enumerations representing the Emotion and Expressions featured in the Affectiva SDK.
* -a Metric interface to allow easy iteration through all Expressions and Emotions
* -utility methods for converting a Metric into several types of strings
*/
public class MetricsManager {
@ -13,67 +17,23 @@ public class MetricsManager {
static {
Emotions[] emotions = Emotions.values();
Expressions[] expressions = Expressions.values();
allMetrics = new Metrics[emotions.length + expressions.length];
System.arraycopy(emotions,0,allMetrics,0,emotions.length);
System.arraycopy(expressions,0,allMetrics,emotions.length,expressions.length);
Emojis[] emojis = Emojis.values();
allMetrics = new Metrics[emotions.length + expressions.length + emojis.length];
System.arraycopy(emotions, 0, allMetrics, 0, emotions.length);
System.arraycopy(expressions, 0, allMetrics, emotions.length, expressions.length);
System.arraycopy(emojis, 0, allMetrics, emotions.length + expressions.length, emojis.length);
}
static Metrics[] getAllMetrics() {
return allMetrics;
}
enum MetricType {Emotion, Expression};
interface Metrics {
MetricType getType();
}
enum Emotions implements Metrics {
ANGER,
DISGUST,
FEAR,
JOY,
SADNESS,
SURPRISE,
CONTEMPT,
ENGAGEMENT,
VALENCE;
@Override
public MetricType getType() {
return MetricType.Emotion;
}
}
enum Expressions implements Metrics {
ATTENTION,
BROW_FURROW,
BROW_RAISE,
CHIN_RAISE,
EYE_CLOSURE,
INNER_BROW_RAISE,
LIP_CORNER_DEPRESSOR,
LIP_PRESS,
LIP_PUCKER,
LIP_SUCK,
MOUTH_OPEN,
NOSE_WRINKLE,
SMILE,
SMIRK,
UPPER_LIP_RAISE;
@Override
public MetricType getType() {
return MetricType.Expression;
}
}
//Used for displays
static String getUpperCaseName(Metrics metric) {
if (metric == Expressions.LIP_CORNER_DEPRESSOR) {
return "FROWN";
} else if (metric.getType().equals(MetricType.Emoji)) {
return ((Emojis) metric).getDisplayName().toUpperCase(Locale.US);
} else {
return metric.toString().replace("_", " ");
}
@ -82,6 +42,9 @@ public class MetricsManager {
//Used for MetricSelectionFragment
//This method is optimized for strings of the form SOME_METRIC_NAME, which all metric names currently are
static String getCapitalizedName(Metrics metric) {
if (metric.getType().equals(MetricType.Emoji)) {
return ((Emojis) metric).getDisplayName();
}
if (metric == Expressions.LIP_CORNER_DEPRESSOR) {
return "Frown";
}
@ -118,7 +81,7 @@ public class MetricsManager {
builder.append(Character.toUpperCase(metricString.charAt(0)));
if (metricString.length() > 1) {
for (int n = 1; n < metricString.length(); n++ ){
for (int n = 1; n < metricString.length(); n++) {
char c = metricString.charAt(n);
if (c == '_') {
n += 1;
@ -134,5 +97,116 @@ public class MetricsManager {
return builder.toString();
}
public enum MetricType {Emotion, Expression, Emoji}
public enum Emotions implements Metrics {
ANGER,
DISGUST,
FEAR,
JOY,
SADNESS,
SURPRISE,
CONTEMPT,
ENGAGEMENT,
VALENCE;
@Override
public MetricType getType() {
return MetricType.Emotion;
}
}
public enum Expressions implements Metrics {
ATTENTION,
BROW_FURROW,
BROW_RAISE,
CHIN_RAISE,
EYE_CLOSURE,
INNER_BROW_RAISE,
LIP_CORNER_DEPRESSOR,
LIP_PRESS,
LIP_PUCKER,
LIP_SUCK,
MOUTH_OPEN,
NOSE_WRINKLE,
SMILE,
SMIRK,
UPPER_LIP_RAISE;
@Override
public MetricType getType() {
return MetricType.Expression;
}
}
public enum Emojis implements Metrics {
RELAXED("Relaxed"),
SMILEY("Smiley"),
LAUGHING("Laughing"),
KISSING("Kiss"),
DISAPPOINTED("Disappointed"),
RAGE("Rage"),
SMIRK("Smirk Emoji"),
WINK("Wink"),
STUCK_OUT_TONGUE_WINKING_EYE("Tongue Wink"),
STUCK_OUT_TONGUE("Tongue Out"),
FLUSHED("Flushed"),
SCREAM("Scream");
private String displayName;
Emojis(String name) {
displayName = name;
}
public static Emojis getEnum(String value) {
for (Emojis v : values())
if (v.displayName.equalsIgnoreCase(value)) return v;
throw new IllegalArgumentException();
}
@Override
public MetricType getType() {
return MetricType.Emoji;
}
public String getDisplayName() {
return displayName;
}
public String getUnicodeForEmoji() {
switch (this) {
case RELAXED:
return Face.EMOJI.RELAXED.getUnicode();
case SMILEY:
return Face.EMOJI.SMILEY.getUnicode();
case LAUGHING:
return Face.EMOJI.LAUGHING.getUnicode();
case KISSING:
return Face.EMOJI.KISSING.getUnicode();
case DISAPPOINTED:
return Face.EMOJI.DISAPPOINTED.getUnicode();
case RAGE:
return Face.EMOJI.RAGE.getUnicode();
case SMIRK:
return Face.EMOJI.SMIRK.getUnicode();
case WINK:
return Face.EMOJI.WINK.getUnicode();
case STUCK_OUT_TONGUE_WINKING_EYE:
return Face.EMOJI.STUCK_OUT_TONGUE_WINKING_EYE.getUnicode();
case STUCK_OUT_TONGUE:
return Face.EMOJI.STUCK_OUT_TONGUE.getUnicode();
case FLUSHED:
return Face.EMOJI.FLUSHED.getUnicode();
case SCREAM:
return Face.EMOJI.SCREAM.getUnicode();
default:
return "";
}
}
}
public interface Metrics {
MetricType getType();
}
}

View file

@ -1,6 +1,7 @@
package com.affectiva.affdexme;
import android.content.SharedPreferences;
import android.util.Log;
/**
* A helper class to translate strings held in preferences into values to be used by the application.
@ -18,39 +19,44 @@ public class PreferencesUtils {
try {
toReturn = Integer.parseInt(rateString);
} catch (Exception e) {
saveFrameProcessingRate(pref,DEFAULT_FPS);
saveFrameProcessingRate(pref, DEFAULT_FPS);
return DEFAULT_FPS;
}
if (toReturn > 0) {
return toReturn;
} else {
saveFrameProcessingRate(pref,DEFAULT_FPS);
saveFrameProcessingRate(pref, DEFAULT_FPS);
return DEFAULT_FPS;
}
}
private static void saveFrameProcessingRate(SharedPreferences pref, int rate) {
SharedPreferences.Editor editor = pref.edit();
editor.putString("rate",String.valueOf(rate));
editor.putString("rate", String.valueOf(rate));
editor.commit();
}
public static MetricsManager.Metrics getMetricFromPrefs(SharedPreferences pref, int index) {
MetricsManager.Metrics metric;
try {
String stringFromPref = pref.getString(String.format("metric_display_%d", index),defaultMetric(index).toString());
metric = parseSavedMetric(stringFromPref );
String stringFromPref = pref.getString(String.format("metric_display_%d", index), defaultMetric(index).toString());
metric = parseSavedMetric(stringFromPref);
} catch (IllegalArgumentException e) {
metric = defaultMetric(index);
SharedPreferences.Editor editor = pref.edit();
editor.putString(String.format("metric_display_%d", index),defaultMetric(index).toString());
editor.putString(String.format("metric_display_%d", index), defaultMetric(index).toString());
editor.commit();
}
return metric;
}
public static void saveMetricToPrefs(SharedPreferences.Editor editor , int index, MetricsManager.Metrics metric) {
editor.putString(String.format("metric_display_%d", index), metric.toString());
public static void saveMetricToPrefs(SharedPreferences.Editor editor, int index, MetricsManager.Metrics metric) {
if (metric.getType().equals(MetricsManager.MetricType.Emoji)) {
MetricsManager.Emojis emoji = (MetricsManager.Emojis) metric;
editor.putString(String.format("metric_display_%d", index), emoji.getDisplayName());
} else {
editor.putString(String.format("metric_display_%d", index), metric.toString());
}
}
static private MetricsManager.Metrics defaultMetric(int index) {
@ -73,23 +79,30 @@ public class PreferencesUtils {
}
/**
* We attempt to parse the string as an Emotion or, failing that, as an Expression.
* We attempt to parse the string as any known metric.
*/
static MetricsManager.Metrics parseSavedMetric(String metricString) throws IllegalArgumentException{
static MetricsManager.Metrics parseSavedMetric(String metricString) throws IllegalArgumentException {
try {
MetricsManager.Emotions emotion;
emotion = MetricsManager.Emotions.valueOf(metricString);
return emotion;
} catch (IllegalArgumentException emotionParseFailed) {
try {
MetricsManager.Expressions expression;
expression = MetricsManager.Expressions.valueOf(metricString);
return expression;
} catch (IllegalArgumentException expressionParseFailed) {
throw new IllegalArgumentException("String did not match an emotion or expression");
}
Log.v(PreferencesUtils.class.getSimpleName(), "Not an Emotion...");
}
try {
MetricsManager.Expressions expression;
expression = MetricsManager.Expressions.valueOf(metricString);
return expression;
} catch (IllegalArgumentException expressionParseFailed) {
Log.v(PreferencesUtils.class.getSimpleName(), "Not an Expression...");
}
try {
MetricsManager.Emojis emoji;
emoji = MetricsManager.Emojis.getEnum(metricString);
return emoji;
} catch (IllegalArgumentException expressionParseFailed) {
Log.v(PreferencesUtils.class.getSimpleName(), "Not an Emoji...");
}
throw new IllegalArgumentException("String did not match any known metric");
}
}

View file

@ -5,6 +5,7 @@ import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import android.preference.PreferenceActivity;
import android.preference.PreferenceFragment;
import android.support.v4.content.ContextCompat;
import android.view.MenuItem;
import java.util.List;
@ -16,19 +17,11 @@ public class SettingsActivity extends PreferenceActivity {
public void onCreate(Bundle savedBundleInstance) {
super.onCreate(savedBundleInstance);
ActionBar actionBar = getActionBar();
actionBar.setIcon(
new ColorDrawable(getResources().getColor(android.R.color.transparent)));
//actionBar.setDisplayHomeAsUpEnabled(true);
if (actionBar != null) {
actionBar.setIcon(new ColorDrawable(ContextCompat.getColor(getApplicationContext(), R.color.transparent_overlay)));
}
}
/*
@Override
public boolean onNavigateUp() {
this.onBackPressed();
return true;
}*/
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
@ -49,10 +42,7 @@ public class SettingsActivity extends PreferenceActivity {
//Boilerplate method, required by Android API
@Override
protected boolean isValidFragment(String fragmentName) {
if (SettingsFragment.class.getName().equals(fragmentName) || MetricSelectionFragment.class.getName().equals(fragmentName)) {
return(true);
}
return(false);
return SettingsFragment.class.getName().equals(fragmentName) || MetricSelectionFragment.class.getName().equals(fragmentName);
}
//This fragment shows the preferences for the first header.
@ -65,7 +55,4 @@ public class SettingsActivity extends PreferenceActivity {
addPreferencesFromResource(R.xml.settings_preferences);
}
}
//The second fragment is defined in a separate file.
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 13 KiB

After

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 250 B

After

Width:  |  Height:  |  Size: 370 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.6 KiB

After

Width:  |  Height:  |  Size: 1.1 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.6 KiB

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.9 KiB

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.9 KiB

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 5.7 KiB

After

Width:  |  Height:  |  Size: 6.1 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 201 B

After

Width:  |  Height:  |  Size: 308 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.1 KiB

After

Width:  |  Height:  |  Size: 861 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.1 KiB

After

Width:  |  Height:  |  Size: 882 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.2 KiB

After

Width:  |  Height:  |  Size: 1,018 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.2 KiB

After

Width:  |  Height:  |  Size: 1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 19 KiB

After

Width:  |  Height:  |  Size: 20 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 232 B

After

Width:  |  Height:  |  Size: 378 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.4 KiB

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.5 KiB

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2 KiB

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2 KiB

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 30 KiB

After

Width:  |  Height:  |  Size: 7.8 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 290 B

After

Width:  |  Height:  |  Size: 446 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.8 KiB

After

Width:  |  Height:  |  Size: 1.7 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.9 KiB

After

Width:  |  Height:  |  Size: 1.7 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3 KiB

After

Width:  |  Height:  |  Size: 1.8 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3 KiB

After

Width:  |  Height:  |  Size: 2.3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 13 KiB

After

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 232 B

After

Width:  |  Height:  |  Size: 378 B

View file

@ -1,39 +1,59 @@
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent" android:layout_height="match_parent"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:padding="5dp">
<RelativeLayout
android:layout_width="match_parent" android:layout_height="match_parent"
android:id="@+id/grid_item_background"
android:paddingLeft="4dp" android:paddingBottom="8dp" android:paddingRight="4dp">
android:layout_width="match_parent"
android:layout_height="match_parent"
android:paddingBottom="8dp"
android:paddingLeft="4dp"
android:paddingRight="4dp">
<ImageView
android:layout_width="match_parent"
android:layout_height="match_parent"
android:id="@+id/grid_item_image_view_beneath"
/>
<ImageView
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_below="@+id/grid_item_text"
android:scaleType="fitCenter" />
<ImageView
android:id="@+id/grid_item_image_view"
/>
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_below="@+id/grid_item_text"
android:scaleType="fitCenter" />
<FrameLayout
android:id="@+id/video_holder"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="@id/grid_item_text"
android:layout_centerHorizontal="true"
android:layout_gravity="center_horizontal"
android:gravity="center_horizontal" />
<TextView
android:id="@+id/grid_item_text"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:id="@+id/grid_item_text"
android:textSize="@dimen/grid_item_metric_name"
android:layout_alignParentTop="true"
android:background="@color/transparent_overlay"
android:gravity="center"
android:textColor="@color/white"
android:layout_alignParentTop="true"
/>
android:textSize="@dimen/grid_item_metric_name" />
<TextView
android:id="@+id/video_overlay"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:id="@+id/video_overlay"
android:textSize="@dimen/grid_item_chooser_text_size"
android:layout_below="@+id/grid_item_text"
android:gravity="center"
android:textStyle="bold"
android:padding="2dp"/>
android:padding="2dp"
android:textSize="@dimen/grid_item_chooser_text_size"
android:textStyle="bold" />
</RelativeLayout>
</FrameLayout>

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.7 KiB

After

Width:  |  Height:  |  Size: 2.9 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.7 KiB

After

Width:  |  Height:  |  Size: 1.8 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 4.2 KiB

After

Width:  |  Height:  |  Size: 4.4 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 7.3 KiB

After

Width:  |  Height:  |  Size: 7.7 KiB

View file

@ -7,9 +7,10 @@
<string name="settings_content_description">Settings</string>
<string name="insufficient_permissions">Insufficient Permissions</string>
<string name="permissions_needed_explanation">This app requires the permission to access your camera to be able to gather facial images to process, and permission to write to the storage on your device so that we can temporarily store video data there while we process it.</string>
<string name="permissions_needed_explanation">This app requires the permission to access your camera to be able to gather facial images to process</string>
<string name="error">Error</string>
<string name="understood">Understood</string>
<string name="retry">Retry</string>
<!--MetricSelectionFragment strings-->
<string name="clear_all">Clear All</string>
@ -19,7 +20,6 @@
<!--SettingsActivity strings-->
<string name="select_metrics_title">Select Metrics</string>
<string name="select_metrics_message">Select emotions and expressions to display.</string>
<string name="settings_title">Settings</string>
<string name="settings_message">Change application settings.</string>
@ -27,14 +27,18 @@
<string name="set_fps_title">Set Target FPS</string>
<string name="set_fps_message">Set the desired processed frames per second.</string>
<string name="fps_edittext_title">Processed Frames Per Second</string>
<string name="show_tracking_title">Show Tracking Dots</string>
<string name="show_tracking_message">Display tracking dots and bounding box.</string>
<string name="show_appearance_title">Show Appearance Indicators</string>
<string name="show_appearance_message">Display appearance markers to the left of the bounding box.</string>
<string name="show_measurements_title">Show Measurements</string>
<string name="show_measurements_message">Display roll, yaw, pitch, and interocular distance.</string>
<string name="show_fps_title">Show FPS</string>
<string name="show_fps_message">Display the actual processed frames per second.</string>
<string name="retry">Retry</string>
<string name="show_emoji_title">Show Emoji Indicators</string>
<string name="show_emoji_message">Display emoji markers adjacent to the bounding box.</string>
<string name="show_multiface_message">Track multiple people. A beta feature that is CPU intensive, and works only on newer devices.</string>
<string name="show_multiface_title">Enable Multi-face mode</string>
<string name="negative">NEGATIVE</string>
<string name="positive">POSITIVE</string>
</resources>

View file

@ -1,27 +1,38 @@
<?xml version="1.0" encoding="utf-8"?>
<PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android">
<CheckBoxPreference
android:defaultValue="false"
android:key="fps"
android:title="@string/show_fps_title"
android:summary="@string/show_fps_message"
android:defaultValue="false"/>
<CheckBoxPreference
android:key="measurements"
android:title="@string/show_measurements_title"
android:summary="@string/show_measurements_message"
android:defaultValue="false"/>
<CheckBoxPreference
android:key="track"
android:title="@string/show_tracking_title"
android:summary="@string/show_tracking_message"
android:defaultValue="true"/>
android:title="@string/show_fps_title" />
<EditTextPreference
android:key="rate"
android:inputType="number"
android:ems="3"
android:maxLength="2"
android:title="@string/set_fps_title"
android:summary="@string/set_fps_message"
android:defaultValue="20"
android:dialogTitle="@string/fps_edittext_title"
android:defaultValue="20"/>
android:ems="3"
android:inputType="number"
android:key="rate"
android:maxLength="2"
android:summary="@string/set_fps_message"
android:title="@string/set_fps_title" />
<CheckBoxPreference
android:defaultValue="true"
android:key="track"
android:summary="@string/show_tracking_message"
android:title="@string/show_tracking_title" />
<CheckBoxPreference
android:defaultValue="true"
android:key="appearance"
android:summary="@string/show_appearance_message"
android:title="@string/show_appearance_title" />
<CheckBoxPreference
android:defaultValue="true"
android:key="emoji"
android:summary="@string/show_emoji_message"
android:title="@string/show_emoji_title" />
<CheckBoxPreference
android:defaultValue="false"
android:key="multiface"
android:summary="@string/show_multiface_message"
android:title="@string/show_multiface_title" />
</PreferenceScreen>

View file

@ -1,18 +0,0 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
# Default value: -Xmx10248m -XX:MaxPermSize=256m
# org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true

Binary file not shown.