diff --git a/AffdexMe/.gitignore b/AffdexMe/.gitignore new file mode 100644 index 0000000..9c4de58 --- /dev/null +++ b/AffdexMe/.gitignore @@ -0,0 +1,7 @@ +.gradle +/local.properties +/.idea/workspace.xml +/.idea/libraries +.DS_Store +/build +/captures diff --git a/AffdexMe/.idea/.name b/AffdexMe/.idea/.name new file mode 100644 index 0000000..df588a5 --- /dev/null +++ b/AffdexMe/.idea/.name @@ -0,0 +1 @@ +AffdexMe \ No newline at end of file diff --git a/AffdexMe/.idea/compiler.xml b/AffdexMe/.idea/compiler.xml new file mode 100644 index 0000000..96cc43e --- /dev/null +++ b/AffdexMe/.idea/compiler.xml @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/AffdexMe/.idea/copyright/profiles_settings.xml b/AffdexMe/.idea/copyright/profiles_settings.xml new file mode 100644 index 0000000..e7bedf3 --- /dev/null +++ b/AffdexMe/.idea/copyright/profiles_settings.xml @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/AffdexMe/.idea/gradle.xml b/AffdexMe/.idea/gradle.xml new file mode 100644 index 0000000..3ed2e6c --- /dev/null +++ b/AffdexMe/.idea/gradle.xml @@ -0,0 +1,19 @@ + + + + + + \ No newline at end of file diff --git a/AffdexMe/.idea/misc.xml b/AffdexMe/.idea/misc.xml new file mode 100644 index 0000000..e45faed --- /dev/null +++ b/AffdexMe/.idea/misc.xml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/AffdexMe/.idea/modules.xml b/AffdexMe/.idea/modules.xml new file mode 100644 index 0000000..42ad053 --- /dev/null +++ b/AffdexMe/.idea/modules.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/AffdexMe/.idea/vcs.xml b/AffdexMe/.idea/vcs.xml new file mode 100644 index 0000000..6564d52 --- /dev/null +++ b/AffdexMe/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/AffdexMe/AffdexMe.iml b/AffdexMe/AffdexMe.iml new file mode 100644 index 0000000..dfe3cb4 --- /dev/null +++ b/AffdexMe/AffdexMe.iml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/AffdexMe/AffdexMe_original.iml b/AffdexMe/AffdexMe_original.iml new file mode 100644 index 0000000..c05828d --- /dev/null +++ b/AffdexMe/AffdexMe_original.iml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/AffdexMe/app/.gitignore b/AffdexMe/app/.gitignore new file mode 100644 index 0000000..796b96d --- /dev/null +++ b/AffdexMe/app/.gitignore @@ -0,0 +1 @@ +/build diff --git a/AffdexMe/app/app.iml b/AffdexMe/app/app.iml new file mode 100644 index 0000000..8d4d2f8 --- /dev/null +++ b/AffdexMe/app/app.iml @@ -0,0 +1,101 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/AffdexMe/app/build.gradle b/AffdexMe/app/build.gradle new file mode 100644 index 0000000..bea9053 --- /dev/null +++ b/AffdexMe/app/build.gradle @@ -0,0 +1,38 @@ +apply plugin: 'com.android.application' + +android { + compileSdkVersion 22 + buildToolsVersion '22.0.1' + + defaultConfig { + applicationId "com.affectiva.affdexme" + minSdkVersion 16 + targetSdkVersion 22 + versionCode 11 + versionName "1.0.844" + } + buildTypes { + release { + minifyEnabled false + proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' + } + } +} + +dependencies { + //gson is necessary for the license to be parsed + compile 'com.google.code.gson:gson:2.3' + + //include the 
Affdex SDK jars + compile files('libs/Affdex-sdk.jar') + compile files('libs/Affdex-sdk-javadoc.jar') + compile files('libs/dagger-1.2.2.jar') + compile files('libs/flurry-analytics-4.1.0.jar') + compile files('libs/javax.inject-1.jar') + + //although the use of the CameraDetector class in this project does not require it, you may have to include + //the following dependencies if you use other features of the Affdex SDK + //compile 'com.android.support:support-v4:22.2.0' + //compile 'com.google.android.gms:play-services:7.5.0' + +} diff --git a/AffdexMe/app/build.xml b/AffdexMe/app/build.xml new file mode 100644 index 0000000..0cec068 --- /dev/null +++ b/AffdexMe/app/build.xml @@ -0,0 +1,92 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/AffdexMe/app/proguard-rules.pro b/AffdexMe/app/proguard-rules.pro new file mode 100644 index 0000000..6914cbb --- /dev/null +++ b/AffdexMe/app/proguard-rules.pro @@ -0,0 +1,29 @@ +# Add project specific ProGuard rules here. +# By default, the flags in this file are appended to flags specified +# in /Users/AlanCasalas/Library/Android/sdk/tools/proguard/proguard-android.txt +# You can edit the include path and order by changing the proguardFiles +# directive in build.gradle. +# +# For more details, see +# http://developer.android.com/guide/developing/tools/proguard.html + +# Add any project specific keep options here: + +#prevent proguard from warning us about not including the GooglePlay dependency +-dontwarn ** + +#keep all classes (otherwise Proguard may remove classes that use reflection, injection, Gson, etc...) +-keep class sun.** +-keep class com.** +-keep class android.** +-keep class dagger.** +-keep class javax.** + +#keep certain class members (otherwise Proguard would strip the members of these classes) +-keepclassmembers class com.affectiva.android.affdex.sdk.detector.License { *; } +-keepclassmembers class com.affectiva.android.affdex.sdk.detector.A* { *; } +-keepclassmembers class * { + @javax.inject.* *; + @dagger.* *; + (); +} \ No newline at end of file diff --git a/AffdexMe/app/src/main/AndroidManifest.xml b/AffdexMe/app/src/main/AndroidManifest.xml new file mode 100644 index 0000000..2415da8 --- /dev/null +++ b/AffdexMe/app/src/main/AndroidManifest.xml @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/AffdexMe/app/src/main/assets/fonts/Square.ttf b/AffdexMe/app/src/main/assets/fonts/Square.ttf new file mode 100644 index 0000000..9f1867e Binary files /dev/null and b/AffdexMe/app/src/main/assets/fonts/Square.ttf differ diff --git a/AffdexMe/app/src/main/java/com/affectiva/affdexme/DrawingView.java b/AffdexMe/app/src/main/java/com/affectiva/affdexme/DrawingView.java new file mode 100644 index 0000000..b2c8bb7 --- /dev/null +++ b/AffdexMe/app/src/main/java/com/affectiva/affdexme/DrawingView.java @@ -0,0 +1,271 @@ +package com.affectiva.affdexme; + +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.PixelFormat; +import android.graphics.PointF; +import android.graphics.PorterDuff; +import android.os.Process; +import android.os.SystemClock; +import android.util.AttributeSet; +import android.util.Log; +import android.view.SurfaceHolder; +import android.view.SurfaceView; + + +/** + * This class contains a SurfaceView and its own thread that draws to it. + * It is used to display the facial tracking dots over a user's face. 
+ */ +public class DrawingView extends SurfaceView implements SurfaceHolder.Callback { + + //Inner Thread class + class DrawingThread extends Thread{ + private SurfaceHolder mSurfaceHolder; + private Paint circlePaint; + private Paint boxPaint; + private boolean stopFlag = false; //boolean to indicate when thread has been told to stop + private PointF[] nextPointsToDraw = null; //holds a reference to the most recent set of points returned by CameraDetector, passed in by main thread + boolean isDrawPointsEnabled = false; //saves whether user has selected dots to be drawn + float imageWidth = 0; + float imageHeight = 0; + float screenToImageRatio = 0; + float drawThickness = 0; //thickness with which dots and square will be drawn + + private final long drawPeriod = 33; //draw at 30 fps + + public DrawingThread(SurfaceHolder surfaceHolder, boolean drawPoints) { + mSurfaceHolder = surfaceHolder; + + circlePaint = new Paint(); + circlePaint.setColor(Color.WHITE); + + boxPaint = new Paint(); + boxPaint.setColor(Color.WHITE); + boxPaint.setStyle(Paint.Style.STROKE); + + isDrawPointsEnabled = drawPoints; + } + + /** + * Used to set the valence score, which determines the color of the bounding box. + * **/ + void setScore(float s) { + if (s > 0) { + float colorScore = ((100f-s)/100f)*255; + boxPaint.setColor(Color.rgb((int)colorScore,255,(int)colorScore)); + } else { + float colorScore = ((100f+s)/100f)*255; + boxPaint.setColor(Color.rgb(255,(int)colorScore,(int)colorScore)); + } + } + + public void stopThread() { + stopFlag = true; + } + + public boolean isStopped() { + return stopFlag; + } + + //Updates thread with latest points returned by the onImageResults() event. + public void updatePoints(PointF[] pointList) { + nextPointsToDraw = pointList; + } + + //Sets measurements thread will use to draw facial tracking dots. + public void setDimen(int w, int h, float appToImageRatio, float thickness) { + imageWidth = w; + imageHeight = h; + screenToImageRatio = appToImageRatio; + drawThickness = thickness; + boxPaint.setStrokeWidth(thickness); + } + + private void setDrawPointsEnabled(boolean b) { + isDrawPointsEnabled = b; + } + + private boolean getDrawPointsEnabled() { + return isDrawPointsEnabled; + } + + //Inform thread face detection has stopped, so array of points is no longer valid. + public void invalidatePoints() { + nextPointsToDraw = null; + } + + @Override + public void run() { + android.os.Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND); + + while(!stopFlag) { + long startTime = SystemClock.elapsedRealtime(); //get time at the start of thread loop + + /** + * We use SurfaceHolder.lockCanvas() to get the canvas that draws to the SurfaceView. + * After we are done drawing, we let go of the canvas using SurfaceHolder.unlockCanvasAndPost() + * **/ + Canvas c = null; + try { + c = mSurfaceHolder.lockCanvas(); + + if (c!= null) { + synchronized (mSurfaceHolder) { + c.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR); //clear previous dots + if (isDrawPointsEnabled && (nextPointsToDraw != null) ) { + draw(c); + } + } + } + } + finally { + if (c!= null) { + mSurfaceHolder.unlockCanvasAndPost(c); + } + } + + //send thread to sleep so we don't draw faster than the requested 'drawPeriod'. 
+ long sleepTime = drawPeriod - (SystemClock.elapsedRealtime() - startTime); + try { + if(sleepTime>0){ + this.sleep(sleepTime); + } + } catch (InterruptedException ex) { + Log.e(LOG_TAG,ex.getMessage()); + } + } + } + + void draw(Canvas c) { + //Save our own reference to the list of points, in case the previous reference is overwritten by the main thread. + PointF[] points = nextPointsToDraw; + + //Coordinates around which to draw bounding box. + float leftBx = imageWidth; + float rightBx = 0; + float topBx = imageHeight; + float botBx = 0; + + //Iterate through all the points given to us by the CameraDetector object + for (int i = 0; i < points.length; i++) { + + //We determine the left-most, top-most, right-most, and bottom-most points to draw the bounding box around. + if (points[i].x < leftBx) + leftBx = points[i].x; + if (points[i].x > rightBx) + rightBx = points[i].x; + if (points[i].y < topBx) + topBx = points[i].y; + if (points[i].y > botBx) + botBx = points[i].y; + + //Draw facial tracking dots. + //The camera preview is displayed as a mirror, so X pts have to be reversed + c.drawCircle((imageWidth - points[i].x - 1) * screenToImageRatio, (points[i].y)* screenToImageRatio, drawThickness, circlePaint); + } + + //Draw the bounding box. + c.drawRect((imageWidth - leftBx - 1) * screenToImageRatio, topBx * screenToImageRatio, (imageWidth - rightBx - 1) * screenToImageRatio, botBx * screenToImageRatio, boxPaint); + } + } + + //Class variables of DrawingView class + private SurfaceHolder surfaceHolder; + private DrawingThread drawingThread; //DrawingThread object + private boolean isDimensionsNeeded = true; + private boolean isDrawPointsEnabled = true; //by default, start drawing thread without drawing points + private static String LOG_TAG = "AffdexMe"; + + //three constructors required of any custom view + public DrawingView(Context context) { + super(context); + initView(); + } + public DrawingView(Context context, AttributeSet attrs) { + super(context, attrs); + initView(); + } + public DrawingView(Context context, AttributeSet attrs, int defStyle) { + super(context, attrs, defStyle); + initView(); + } + + void initView(){ + surfaceHolder = getHolder(); //The SurfaceHolder object will be used by the thread to request canvas to draw on SurfaceView + surfaceHolder.setFormat(PixelFormat.TRANSPARENT); //set to Transparent so this surfaceView does not obscure the one it is overlaying (the one displaying the camera). + surfaceHolder.addCallback(this); //become a Listener to the three events below that SurfaceView throws + drawingThread = new DrawingThread(surfaceHolder, isDrawPointsEnabled); + } + + @Override + public void surfaceCreated(SurfaceHolder holder) { + if (drawingThread.isStopped()) { + drawingThread = new DrawingThread(surfaceHolder, isDrawPointsEnabled); + } + drawingThread.start(); + } + + @Override + public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { + } + + @Override + public void surfaceDestroyed(SurfaceHolder holder) { + //command thread to stop, and wait until it stops + boolean retry = true; + drawingThread.stopThread(); + while (retry) { + try { + drawingThread.join(); + retry = false; + } catch (InterruptedException e) { + Log.e(LOG_TAG,e.getMessage()); + } + } + isDimensionsNeeded = true; //Now that thread has been destroyed, we'll need dimensions to be recalculated if a new thread is later created. 
+ } + + public void setDimensions(int width, int height, float appToImageRatio, float radius) { + drawingThread.setDimen(width, height, appToImageRatio, radius); + isDimensionsNeeded = false; + } + + public boolean isDimensionsNeeded() { + return isDimensionsNeeded; + } + + public void setDrawPointsEnabled(boolean b){ + isDrawPointsEnabled = b; + drawingThread.setDrawPointsEnabled(b); + } + + public void invalidateDimensions() { + isDimensionsNeeded = true; + } + + public boolean getDrawPointsEnabled() { + return isDrawPointsEnabled; + } + + //The methods below simply delegate to the drawingThread object + public void setScore(float s) { + drawingThread.setScore(s); + } + + public void updatePoints(PointF[] points) { + drawingThread.updatePoints(points); + } + + public void invalidatePoints(){ + drawingThread.invalidatePoints(); + } + + + + + + +} diff --git a/AffdexMe/app/src/main/java/com/affectiva/affdexme/GradientMetricView.java b/AffdexMe/app/src/main/java/com/affectiva/affdexme/GradientMetricView.java new file mode 100644 index 0000000..0a9f181 --- /dev/null +++ b/AffdexMe/app/src/main/java/com/affectiva/affdexme/GradientMetricView.java @@ -0,0 +1,48 @@ +package com.affectiva.affdexme; + +import android.content.Context; +import android.graphics.Color; +import android.util.AttributeSet; + +/** + * GradientMetricView is used to display the valence metric and adds functionality of allowing + * the bar's shade of color to scale with the metric's value, rather than just being red or green. + */ +public class GradientMetricView extends MetricView { + + //Three Constructors required of any custom view: + public GradientMetricView(Context context) { + super(context); + } + public GradientMetricView(Context context, AttributeSet attrs) { + super(context, attrs); + } + public GradientMetricView(Context context, AttributeSet attrs, int styleID){ + super(context, attrs, styleID); + } + + /** + * As in MetricView, we set our text to display the score and size the colored bar appropriately. + * In this class, however, we let the score determine the color of the bar (shades of red for negative + * and shades of green for positive). 
+ */ + @Override + public void setScore(float s) { + text = String.format("%d%%", (int)s); + if (s > 0) { + left = midX - (halfWidth * (s / 100)); + right = midX + (halfWidth * (s / 100)); + } else { + left = midX - (halfWidth * (-s / 100)); + right = midX + (halfWidth * (-s / 100)); + } + if (s > 0) { + float colorScore = ((100f-s)/100f)*255; + boxPaint.setColor(Color.rgb((int)colorScore,255,(int)colorScore)); + } else { + float colorScore = ((100f+s)/100f)*255; + boxPaint.setColor(Color.rgb(255,(int)colorScore,(int)colorScore)); + } + invalidate(); //instruct Android to re-draw our view, now that the text has changed + } +} diff --git a/AffdexMe/app/src/main/java/com/affectiva/affdexme/MainActivity.java b/AffdexMe/app/src/main/java/com/affectiva/affdexme/MainActivity.java new file mode 100644 index 0000000..f04ade7 --- /dev/null +++ b/AffdexMe/app/src/main/java/com/affectiva/affdexme/MainActivity.java @@ -0,0 +1,632 @@ +package com.affectiva.affdexme; + +import android.app.Activity; +import android.content.Context; +import android.content.SharedPreferences; +import android.content.pm.PackageManager; +import android.graphics.Typeface; +import android.os.Bundle; +import android.os.SystemClock; +import android.util.Log; +import android.view.KeyEvent; +import android.view.MotionEvent; +import android.view.SurfaceView; +import android.view.View; +import android.view.ViewGroup; +import android.view.WindowManager; +import android.view.inputmethod.EditorInfo; +import android.view.inputmethod.InputMethodManager; +import android.widget.CheckBox; +import android.widget.EditText; +import android.widget.FrameLayout; +import android.widget.LinearLayout; +import android.widget.RelativeLayout; +import android.widget.TextView; + +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.InputStreamReader; +import java.util.List; + +import com.affectiva.android.affdex.sdk.Frame; +import com.affectiva.android.affdex.sdk.Frame.ROTATE; +import com.affectiva.android.affdex.sdk.detector.CameraDetector; +import com.affectiva.android.affdex.sdk.detector.Detector; +import com.affectiva.android.affdex.sdk.detector.Face; + +/* + * AffdexMe is an app that demonstrates the use of the Affectiva Android SDK. It uses the + * front-facing camera on your Android device to view, process and analyze live video of your face. + * Start the app and you will see your own face on the screen and metrics describing your + * expressions. + * + * Tapping the screen will bring up a menu with options to display the Processed Frames Per Second metric, + * display facial tracking points, and control the rate at which frames are processed by the SDK. + * + * Most of the methods in this file control the application's UI. Therefore, if you are just interested in learning how the Affectiva SDK works, + * you will find the calls relevant to the use of the SDK in the startCamera() and stopCamera() methods, as well as the onImageResults() method. + * + * This class implements the Detector.ImageListener interface, allowing it to receive the onImageResults() event. + * This class implements the Detector.FaceListener interface, allowing it to receive the onFaceDetectionStarted() and + * onFaceDetectionStopped() events. 
+ * + * In order to use this project, you will need to: + * - Obtain the SDK from Affectiva (visit http://www.affdex.com/mobile-sdk) + * - Copy the SDK assets folder contents into this project's assets folder + * - Copy the SDK libs folder contents into this project's libs folder + * - Copy the armeabi-v7a folder (found in the SDK libs folder) into this project's jniLibs folder + * - Add your license file to the /assets/Affdex folder and uncomment the line in the startCamera() method + * to type in your license file name + * - Build the project + * - Run the app on an Android device with a front-facing camera + * + * Copyright (c) 2014 Affectiva. All rights reserved. + */ + +public class MainActivity extends Activity + implements Detector.FaceListener, Detector.ImageListener, TextView.OnEditorActionListener, View.OnTouchListener{ + + private static final String LOG_TAG = "AffdexMe"; + + //Affectiva SDK Object + private CameraDetector detector = null; + + //Metrics View UI Objects + private RelativeLayout metricViewLayout; + private LinearLayout leftMetricsLayout; + private LinearLayout rightMetricsLayout; + private MetricView smilePct; + private MetricView browRaisePct; + private MetricView browFurrowPct; + private MetricView frownPct; + private MetricView valencePct; + private MetricView engagementPct; + private TextView fpsName; + private TextView fpsPct; + private TextView smileName; + private TextView browRaiseName; + private TextView browFurrowName; + private TextView frownName; + private TextView valenceName; + private TextView engagementName; + + //Menu UI Objects + private RelativeLayout menuLayout; + private EditText fpsEditText; + private CheckBox fpsCheckbox; + private CheckBox trackingCheckbox; + private TextView fpsEditTextName; + + //Other UI objects + private ViewGroup activityLayout; //top-most ViewGroup in which all content resides + private RelativeLayout mainLayout; //layout, to be resized, containing all UI elements + private RelativeLayout progressBarLayout; //layout used to show progress circle while camera is starting + private SurfaceView cameraView; //SurfaceView used to display camera images + private DrawingView drawingView; //SurfaceView containing its own thread, used to draw facial tracking dots + + //The Shared Preferences object is used to restore/save settings when activity is created/destroyed + private final String PREFS_NAME = "AffdexMe"; + SharedPreferences sharedPreferences; + + //Application settings variables + private int detectorProcessRate = 20; + private boolean isMenuVisible = false; + private boolean isFPSVisible = false; + + //Frames Per Second (FPS) variables + private long firstSystemTime = 0; + private float numberOfFrames = 0; + private long timeToUpdate = 0; + + private boolean isFrontFacingCameraDetected = true; + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); //To maximize UI space, we declare our app to be full-screen + setContentView(R.layout.activity_main); + + /** + * We check to make sure the device has a front-facing camera. + * If it does not, we obscure the app with a notice informing the user they cannot + * use the app. 
+ */ + if (!getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT)) { + isFrontFacingCameraDetected = false; + TextView notFoundTextView = (TextView) findViewById(R.id.not_found_textview); + notFoundTextView.setVisibility(View.VISIBLE); + } + + initializeUI(); + } + + void initializeUI() { + + //Get handles to UI objects + activityLayout = (ViewGroup) findViewById(android.R.id.content); + progressBarLayout = (RelativeLayout) findViewById(R.id.progress_bar_cover); + metricViewLayout = (RelativeLayout) findViewById(R.id.metric_view_group); + leftMetricsLayout = (LinearLayout) findViewById(R.id.left_metrics); + rightMetricsLayout = (LinearLayout) findViewById(R.id.right_metrics); + mainLayout = (RelativeLayout) findViewById(R.id.main_layout); + menuLayout = (RelativeLayout) findViewById(R.id.affdexme_menu); + smilePct = (MetricView) findViewById(R.id.smile_pct); + browRaisePct = (MetricView) findViewById(R.id.brow_raise_pct); + browFurrowPct = (MetricView) findViewById(R.id.brow_furrow_pct); + frownPct = (MetricView) findViewById(R.id.frown_pct); + valencePct = (MetricView) findViewById(R.id.valence_pct); + engagementPct = (MetricView) findViewById(R.id.engagement_pct); + fpsPct = (TextView) findViewById(R.id.fps_value); + smileName = (TextView) findViewById(R.id.smile_name); + browRaiseName = (TextView) findViewById(R.id.brow_raise_name); + browFurrowName = (TextView) findViewById(R.id.brow_furrow_name); + frownName = (TextView) findViewById(R.id.frown_name); + valenceName = (TextView) findViewById(R.id.valence_name); + engagementName = (TextView) findViewById(R.id.engagement_name); + fpsName = (TextView) findViewById(R.id.fps_name); + fpsEditText = (EditText) findViewById(R.id.fps_edittext); + fpsEditTextName = (TextView) findViewById(R.id.fps_edittext_name); + fpsCheckbox = (CheckBox) findViewById(R.id.fps_checkbox); + trackingCheckbox = (CheckBox) findViewById(R.id.tracking_checkbox); + cameraView = (SurfaceView) findViewById(R.id.camera_preview); + drawingView = (DrawingView) findViewById(R.id.drawing_view); + + //Load Application Font and set UI Elements to use it + Typeface face = Typeface.createFromAsset(getAssets(), "fonts/Square.ttf"); + smilePct.setTypeface(face); + browRaisePct.setTypeface(face); + browFurrowPct.setTypeface(face); + frownPct.setTypeface(face); + valencePct.setTypeface(face); + engagementPct.setTypeface(face); + smileName.setTypeface(face); + browRaiseName.setTypeface(face); + browFurrowName.setTypeface(face); + frownName.setTypeface(face); + valenceName.setTypeface(face); + engagementName.setTypeface(face); + fpsPct.setTypeface(face); + fpsName.setTypeface(face); + fpsEditTextName.setTypeface(face); + fpsCheckbox.setTypeface(face); + trackingCheckbox.setTypeface(face); + + //Hide left and right metrics by default (will be made visible when face detection starts) + leftMetricsLayout.setAlpha(0); + rightMetricsLayout.setAlpha(0); + + /** + * This app uses two SurfaceView objects: one to display the camera image and the other to draw facial tracking dots. + * Since we want the tracking dots to appear over the camera image, we use SurfaceView.setZOrderMediaOverlay() to indicate that + * cameraView represents our 'media', and drawingView represents our 'overlay', so that Android will render them in the + * correct order. 
+ */ + drawingView.setZOrderMediaOverlay(true); + cameraView.setZOrderMediaOverlay(false); + + //Attach event listeners to the menu and edit box + activityLayout.setOnTouchListener(this); + menuLayout.setOnTouchListener(new View.OnTouchListener() { + @Override + public boolean onTouch(View v, MotionEvent event) { + /** + * This method effectively blocks the mainLayout from receiving a touch event + * when the menu is pressed. This is to prevent the menu from closing if the user accidentally touches it + * when aiming for a checkbox or edit box. + */ + return true; + } + }); + fpsEditText.setOnEditorActionListener(this); + + /** + * This app sets the View.SYSTEM_UI_FLAG_HIDE_NAVIGATION flag. Unfortunately, this flag causes + * Android to steal the first touch event after the navigation bar has been hidden, a touch event + * which should be used to make our menu visible again. Therefore, we attach a listener to be notified + * when the navigation bar has been made visible again, which corresponds to the touch event that Android + * steals. If the menu bar was not visible, we make it visible. + */ + activityLayout.setOnSystemUiVisibilityChangeListener(new View.OnSystemUiVisibilityChangeListener() { + @Override + public void onSystemUiVisibilityChange(int uiCode) { + if ((uiCode == 0) && (isMenuVisible == false)) { + setMenuVisible(true); + } + + } + }); + } + + /** + * We use onResume() to restore application settings using the SharedPreferences object and + * to indicate that dimensions should be recalculated. + */ + @Override + public void onResume() { + super.onResume(); + restoreApplicationSettings(); + drawingView.invalidateDimensions(); //set flag to have screen dimensions resized (usage appears in onImageResults()) + setMenuVisible(false); //always make the menu invisible by default + } + + /** + * We use the Shared Preferences object to restore application settings. + * **/ + public void restoreApplicationSettings() { + sharedPreferences = getSharedPreferences(PREFS_NAME, 0); + + detectorProcessRate = sharedPreferences.getInt("rate", detectorProcessRate); //restore camera processing rate + fpsEditText.setText(String.valueOf(detectorProcessRate)); + + if (sharedPreferences.getBoolean("fps",isFPSVisible)) { //restore isFPSMetricVisible + fpsCheckbox.setChecked(true); + setFPSVisible(true); + } else { + fpsCheckbox.setChecked(false); + setFPSVisible(false); + } + + if (sharedPreferences.getBoolean("track",drawingView.getDrawPointsEnabled())) { //restore isTrackingDotsVisible + setTrackPoints(true); + trackingCheckbox.setChecked(true); + } else { + setTrackPoints(false); + trackingCheckbox.setChecked(false); + } + } + + /** + * Reset the variables used to calculate processed frames per second. + * **/ + public void resetFPSCalculations() { + firstSystemTime = SystemClock.elapsedRealtime(); + timeToUpdate = firstSystemTime + 1000L; + numberOfFrames = 0; + } + + /** + * We start the camera as soon as the application has been given focus, which occurs as soon as the application has + * been opened or reopened. Although this can also occur when the application regains focus after a dialog box has been closed, + * the camera will not be reinitialized because the detector object will not have been set to null during onPause(). + * We also reset variables used to calculate the Processed Frames Per Second. 
+ */ + @Override + public void onWindowFocusChanged(boolean hasFocus) { + if (hasFocus && isFrontFacingCameraDetected) { + startCamera(); + resetFPSCalculations(); + } + } + + void startCamera() { + if (detector == null) { + /** Put the SDK in camera mode by using this constructor. The SDK will be in control of + * the camera. If a SurfaceView is passed in as the last argument to the constructor, + * that view will be painted with what the camera sees. + */ + detector = new CameraDetector(this, CameraDetector.CameraType.CAMERA_FRONT, cameraView); + + // NOTE: uncomment the line below and replace "YourLicenseFile" with your license file, which should be stored in /assets/Affdex/ + //detector.setLicensePath("YourLicenseFile"); + + // We want to detect all expressions, so turn on all classifiers. + detector.setDetectSmile(true); + detector.setDetectBrowFurrow(true); + detector.setDetectBrowRaise(true); + detector.setDetectEngagement(true); + detector.setDetectValence(true); + detector.setDetectLipCornerDepressor(true); + + detector.setMaxProcessRate(detectorProcessRate); + + detector.setImageListener(this); + detector.setFaceListener(this); + + //now that the CameraDetector object has been set up, start the camera + try { + detector.start(); + } catch (Exception e) { + Log.e(LOG_TAG, e.getMessage()); + } + } + } + + + @Override + public void onFaceDetectionStarted() { + leftMetricsLayout.animate().alpha(1); //make left and right metrics appear + rightMetricsLayout.animate().alpha(1); + resetFPSCalculations(); //Since the FPS may be different whether a face is being tracked or not, reset variables. + + } + + @Override + public void onFaceDetectionStopped() { + performFaceDetectionStoppedTasks(); + } + + void performFaceDetectionStoppedTasks() { + leftMetricsLayout.animate().alpha(0); //make left and right metrics disappear + rightMetricsLayout.animate().alpha(0); + drawingView.invalidatePoints(); //inform the drawing thread that the latest facial tracking points are now invalid + resetFPSCalculations(); //Since the FPS may be different whether a face is being tracked or not, reset variables. + } + + + /** + * This event is received every time the SDK processes a frame. + */ + @Override + public void onImageResults(List faces, Frame image, float timeStamp) { + /** + * If the flag indicating that we need to size our layout is set, call calculateDimensions(). + * The flag is a boolean stored in our drawingView object, retrieved through DrawingView.isDimensionsNeeded(). + */ + if (drawingView.isDimensionsNeeded() ) { + calculateDimensions(image); + } + + //If the faces object is null, we received an unprocessed frame + if (faces == null) { + return; + } + + //At this point, we know the frame received was processed, so we perform our processed frames per second calculations + performFPSCalculations(); + + //If faces.size() is 0, we received a frame in which no face was detected + if (faces.size() == 0) { + return; + } + + //The SDK currently detects one face at a time, so we recover it using .get(0). + //'0' indicates we are recovering the first face. + Face face = faces.get(0); + + //update metrics with latest face information. The metrics are displayed on a MetricView, a custom view with a .setScore() method. 
+ smilePct.setScore(face.getSmileScore()); + browRaisePct.setScore(face.getBrowRaiseScore()); + browFurrowPct.setScore(face.getBrowFurrowScore()); + engagementPct.setScore(face.getEngagementScore()); + frownPct.setScore(face.getLipCornerDepressorScore()); + float valenceScore = face.getValenceScore(); + valencePct.setScore(valenceScore); + + /** + * If the user has selected to have facial tracking dots drawn, we use face.getFacePoints() to send those points + * to our drawing thread and also inform the thread what the valence score was, as that will determine the color + * of the bounding box. + */ + if (drawingView.getDrawPointsEnabled()) { + drawingView.setScore(valenceScore); + drawingView.updatePoints(face.getFacePoints()); + } + } + + /** + * This method serves two purposes: + * -It informs the drawing thread of the size of the frames passed by the CameraDetector object. + * -It corrects the dimensions of our mainLayout object to conform to the aspect ratio of the frames passed by the CameraDetector object. + */ + void calculateDimensions(Frame image){ + //Log.i(LOG_TAG,"Dimensions being re-calculated"); + float screenWidth = activityLayout.getWidth(); + float screenHeight = activityLayout.getHeight(); + float referenceDimension = screenHeight; //referenceDimension will be used to determine the size of the facial tracking dots + + //get size of frames being passed by camera + float imageWidth = image.getWidth(); + float imageHeight = image.getHeight(); + + /** + * If device is rotated vertically, reverse the width and height returned by the Frame object, + * and switch the dimension we consider to be the reference dimension. + */ + if ((ROTATE.BY_90_CW == image.getTargetRotation()) || (ROTATE.BY_90_CCW == image.getTargetRotation())) { + float temp = imageWidth; + imageWidth = imageHeight; + imageHeight = temp; + + referenceDimension = screenWidth; + } + + /** + * In this section, we resize our layouts so that the SurfaceView displaying the camera images to will have the same + * aspect ratio as the frames we are receiving from the camera. + * Since all elements in our app are inside 'mainLayout', we just have to adjust the height and width of this layout. + */ + + //calculate aspect ratios of camera frames and screen + float imageAspectRatio = imageWidth/imageHeight; + float screenAspectRatio = screenWidth/screenHeight; + float screenToImageRatio = 0; + int newLayoutHeight = 0; + int newLayoutWidth = 0; + + if (screenAspectRatio < imageAspectRatio) { + newLayoutHeight = (int) (screenWidth / imageAspectRatio); + screenToImageRatio = screenWidth / imageWidth; + newLayoutWidth = (int)screenWidth; + } else { + newLayoutWidth = (int) (screenHeight * imageAspectRatio); + screenToImageRatio = screenHeight/imageHeight; + newLayoutHeight = (int)screenHeight; + } + + + FrameLayout.LayoutParams params = (FrameLayout.LayoutParams) mainLayout.getLayoutParams(); + params.height = newLayoutHeight; + params.width = newLayoutWidth; + mainLayout.setLayoutParams(params); + + /** + * Send necessary dimensions to the drawing thread. + * The dimensions are: width of frame, height of frame, ratio of screen to frame size, and thickness of facial tracking dots. + * This method will clear the flag that indicates whether we need to calculate dimensions, so this calculateDimensions() + * will not be continuously called. 
+ */ + drawingView.setDimensions((int) imageWidth, (int) imageHeight, screenToImageRatio, referenceDimension / 160); + + //Now that the aspect ratio has been corrected, remove the progress bar from obscuring the screen + progressBarLayout.setVisibility(View.GONE); + } + + /** + * FPS measurement simply uses SystemClock to measure how many frames were processed since + * the FPS variables were last reset. + * The constants 1000L and 1000f appear because .elapsedRealtime() measures time in milliseconds. + * Note that if 20 frames per second are processed, this method could run for 1.5 years without being reset + * before numberOfFrames overflows. + */ + void performFPSCalculations() { + numberOfFrames += 1; + long currentTime = SystemClock.elapsedRealtime(); + if (currentTime > timeToUpdate) { + float framesPerSecond = (numberOfFrames/(float)(currentTime - firstSystemTime))*1000f; + fpsPct.setText(String.format(" %.1f",framesPerSecond)); + timeToUpdate = currentTime + 1000L; + } + } + + /** + * Although we start the camera in onWindowFocusChanged(), we stop it in onPause(), and set detector to be null so that when onWindowFocusChanged() + * is called it restarts the camera. We also set the Progress Bar to be visible, so the camera (which may need resizing when the app + * is resumed) is obscured. + */ + @Override + public void onPause() { + super.onPause(); + saveApplicationSettings(); + progressBarLayout.setVisibility(View.VISIBLE); + stopCamera(); + } + + private void stopCamera() { + performFaceDetectionStoppedTasks(); + + if (null != detector) { + try { + detector.stop(); + } catch (Exception e) { + Log.e("AffdexMe", e.getMessage()); + } + } + detector = null; //setting detector to null will allow startCamera() to recreate the detector object when the application is reopened. + } + + /** + * We use the SharedPreferences object to save application settings. + **/ + public void saveApplicationSettings() { + SharedPreferences.Editor editor = sharedPreferences.edit(); + editor.putBoolean("fps", isFPSVisible); + editor.putBoolean("track", drawingView.getDrawPointsEnabled()); + editor.putInt("rate", detectorProcessRate); + editor.commit(); + } + + public void fps_checkbox_click(View view) { + setFPSVisible(((CheckBox) view).isChecked()); + } + + public void tracking_checkbox_click(View view) { + setTrackPoints(((CheckBox) view).isChecked()); + } + + @Override + public boolean onEditorAction(TextView v, int actionId, KeyEvent event) { + /** + * When a user has selected the Edit box to change the number of frames the detector processes per second + * and presses the 'DONE' button, the below block will be executed. + * */ + if (actionId == EditorInfo.IME_ACTION_DONE) { + int parsedInt = 0; + try { + parsedInt = Integer.parseInt(v.getText().toString()); + } catch (Exception e) { + v.setText(String.valueOf(detectorProcessRate)); + return false; + } + if (parsedInt > 0) { + detectorProcessRate = parsedInt; + detector.setMaxProcessRate(detectorProcessRate); + resetFPSCalculations(); //reset FPS variables, since changing the process rate should change the FPS. + } else { + v.setText(String.valueOf(detectorProcessRate)); + } + } + return false; //return false regardless, so Android closes the keyboard when user presses 'DONE' + } + + /** + * When the user taps the screen, hide the menu if it is visible and show it if it is hidden. 
+ * **/ + void setMenuVisible(boolean b){ + isMenuVisible = b; + if (b) { + menuLayout.setVisibility(View.VISIBLE); + + //We display the navigation bar again + getWindow().getDecorView().setSystemUiVisibility( + View.SYSTEM_UI_FLAG_LAYOUT_STABLE + | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION + | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN); + } + else { + InputMethodManager imm = (InputMethodManager)getSystemService( + Context.INPUT_METHOD_SERVICE); + imm.hideSoftInputFromWindow(fpsEditText.getWindowToken(), 0); + + //We hide the navigation bar + getWindow().getDecorView().setSystemUiVisibility( + View.SYSTEM_UI_FLAG_LAYOUT_STABLE + | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION + | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN + | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION + | View.SYSTEM_UI_FLAG_FULLSCREEN + | View.SYSTEM_UI_FLAG_IMMERSIVE); + + + menuLayout.setVisibility(View.INVISIBLE); + } + } + + /** + * If a user has a phone with a physical menu button, they may expect it to toggle + * the menu, so we add that functionality. + */ + @Override + public boolean onKeyDown(int keyCode, KeyEvent event) { + if (keyCode == KeyEvent.KEYCODE_MENU) { + setMenuVisible(!isMenuVisible); + return true; + } + return super.onKeyDown(keyCode, event); + } + + //If the user selects to have facial tracking dots drawn, inform our drawing thread. + void setTrackPoints(boolean b) { + drawingView.setDrawPointsEnabled(b); + } + + void setFPSVisible(boolean b) { + isFPSVisible = b; + if (b) { + fpsName.setVisibility(View.VISIBLE); + fpsPct.setVisibility(View.VISIBLE); + } else { + fpsName.setVisibility(View.INVISIBLE); + fpsPct.setVisibility(View.INVISIBLE); + } + } + + @Override + public boolean onTouch(View v, MotionEvent event) { + if (event.getAction() == MotionEvent.ACTION_DOWN) { + setMenuVisible(!isMenuVisible); + } + return false; + } +} + + diff --git a/AffdexMe/app/src/main/java/com/affectiva/affdexme/MetricView.java b/AffdexMe/app/src/main/java/com/affectiva/affdexme/MetricView.java new file mode 100644 index 0000000..9b300fe --- /dev/null +++ b/AffdexMe/app/src/main/java/com/affectiva/affdexme/MetricView.java @@ -0,0 +1,115 @@ +package com.affectiva.affdexme; + +import android.content.Context; +import android.content.res.TypedArray; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.Typeface; +import android.util.AttributeSet; +import android.view.View; +import java.lang.Math; + +/** + * The MetricView class is used to display metric scores on top of colored bars whose color depend on the score. 
+ */ +public class MetricView extends View { + + float midX = 0; //coordinates of the center of the view + float midY = 0; + float halfWidth = 50;//default width and height of view + float height = 10; + String text = ""; //score in text format + Paint textPaint; + Paint boxPaint; + float left = 0; //colored bar is drawn using left,right,top, and height variables + float right = 0; + float top = 0; + float textBottom = 0; //tells our view where to draw the baseline of the font + + public MetricView(Context context) { + super(context); + initResources(context,null); + } + public MetricView(Context context, AttributeSet attrs) { + super(context,attrs); + initResources(context,attrs); + } + public MetricView(Context context, AttributeSet attrs, int styleID){ + super(context, attrs, styleID); + initResources(context,attrs); + } + + void initResources(Context context, AttributeSet attrs) { + + boxPaint = new Paint(); + boxPaint.setColor(Color.GREEN); + + textPaint = new Paint(); + textPaint.setStyle(Paint.Style.FILL); + textPaint.setTextAlign(Paint.Align.CENTER); + + int textSize = 15; //default text size value + + //load and parse XML attributes + if (attrs != null) { + TypedArray a = getContext().obtainStyledAttributes(attrs,R.styleable.custom_attributes,0,0); + textPaint.setColor(a.getColor(R.styleable.custom_attributes_textColor, Color.BLACK)); + textSize = a.getDimensionPixelSize(R.styleable.custom_attributes_textSize, textSize); + textPaint.setTextSize(textSize); + halfWidth = a.getDimensionPixelSize(R.styleable.custom_attributes_barLength,100)/2; + a.recycle(); + } else { + textPaint.setColor(Color.BLACK); + textPaint.setTextSize(textSize); + } + + /** + * We set the desired height of the view to be as large as our text. + * We also offset the bottom line at which our text is drawn, to give the appearance + * that the text is centered vertically. 
+ */ + height = textSize; + textBottom = height - 5; + + } + + public void setTypeface(Typeface face) { + textPaint.setTypeface(face); + } + + public void setScore(float s){ + text = String.format("%.0f%%", s); //change the text of the view + left = midX - (halfWidth * (s / 100)); //change the coordinates at which the colored bar will be drawn + right = midX + (halfWidth * (s / 100)); + invalidate(); //instruct Android to re-draw our view, now that the text has changed + } + + /** + * set our view to be the minimum of the sizes that Android will allow and our desired sizes + * **/ + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + setMeasuredDimension((int)Math.min(MeasureSpec.getSize(widthMeasureSpec), halfWidth *2), (int)Math.min(MeasureSpec.getSize(heightMeasureSpec),height)); + } + + @Override + protected void onSizeChanged(int w, int h, int oldW, int oldH) { + super.onSizeChanged(w,h,oldW,oldH); + midX = w/2; + midY = h/2; + } + + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + //draws the colored bar that appears behind our score + canvas.drawRect(left,top,right,height, boxPaint); + //draws the score + canvas.drawText(text,midX , textBottom, textPaint); + } + + + + +} diff --git a/AffdexMe/app/src/main/res/drawable-hdpi/affectiva_logo_clear_background.png b/AffdexMe/app/src/main/res/drawable-hdpi/affectiva_logo_clear_background.png new file mode 100644 index 0000000..4778791 Binary files /dev/null and b/AffdexMe/app/src/main/res/drawable-hdpi/affectiva_logo_clear_background.png differ diff --git a/AffdexMe/app/src/main/res/drawable-mdpi/affectiva_logo_clear_background.png b/AffdexMe/app/src/main/res/drawable-mdpi/affectiva_logo_clear_background.png new file mode 100644 index 0000000..2885705 Binary files /dev/null and b/AffdexMe/app/src/main/res/drawable-mdpi/affectiva_logo_clear_background.png differ diff --git a/AffdexMe/app/src/main/res/drawable-xhdpi/affectiva_logo_clear_background.png b/AffdexMe/app/src/main/res/drawable-xhdpi/affectiva_logo_clear_background.png new file mode 100644 index 0000000..ed553b9 Binary files /dev/null and b/AffdexMe/app/src/main/res/drawable-xhdpi/affectiva_logo_clear_background.png differ diff --git a/AffdexMe/app/src/main/res/drawable-xxhdpi/affectiva_logo_clear_background.png b/AffdexMe/app/src/main/res/drawable-xxhdpi/affectiva_logo_clear_background.png new file mode 100644 index 0000000..484b96b Binary files /dev/null and b/AffdexMe/app/src/main/res/drawable-xxhdpi/affectiva_logo_clear_background.png differ diff --git a/AffdexMe/app/src/main/res/layout/activity_main.xml b/AffdexMe/app/src/main/res/layout/activity_main.xml new file mode 100644 index 0000000..f9c2ad1 --- /dev/null +++ b/AffdexMe/app/src/main/res/layout/activity_main.xml @@ -0,0 +1,51 @@ + + + + + + + + + + + diff --git a/AffdexMe/app/src/main/res/layout/menu_layout.xml b/AffdexMe/app/src/main/res/layout/menu_layout.xml new file mode 100644 index 0000000..2a85b8b --- /dev/null +++ b/AffdexMe/app/src/main/res/layout/menu_layout.xml @@ -0,0 +1,60 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/AffdexMe/app/src/main/res/layout/metric_layout.xml b/AffdexMe/app/src/main/res/layout/metric_layout.xml new file mode 100644 index 0000000..fd9acd8 --- /dev/null +++ b/AffdexMe/app/src/main/res/layout/metric_layout.xml @@ -0,0 +1,106 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/AffdexMe/app/src/main/res/mipmap-hdpi/ic_launcher.png 
b/AffdexMe/app/src/main/res/mipmap-hdpi/ic_launcher.png new file mode 100644 index 0000000..563dd9f Binary files /dev/null and b/AffdexMe/app/src/main/res/mipmap-hdpi/ic_launcher.png differ diff --git a/AffdexMe/app/src/main/res/mipmap-mdpi/ic_launcher.png b/AffdexMe/app/src/main/res/mipmap-mdpi/ic_launcher.png new file mode 100644 index 0000000..b930cda Binary files /dev/null and b/AffdexMe/app/src/main/res/mipmap-mdpi/ic_launcher.png differ diff --git a/AffdexMe/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/AffdexMe/app/src/main/res/mipmap-xhdpi/ic_launcher.png new file mode 100644 index 0000000..514621e Binary files /dev/null and b/AffdexMe/app/src/main/res/mipmap-xhdpi/ic_launcher.png differ diff --git a/AffdexMe/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/AffdexMe/app/src/main/res/mipmap-xxhdpi/ic_launcher.png new file mode 100644 index 0000000..8db4bba Binary files /dev/null and b/AffdexMe/app/src/main/res/mipmap-xxhdpi/ic_launcher.png differ diff --git a/AffdexMe/app/src/main/res/values-large/dimens.xml b/AffdexMe/app/src/main/res/values-large/dimens.xml new file mode 100644 index 0000000..62daeb5 --- /dev/null +++ b/AffdexMe/app/src/main/res/values-large/dimens.xml @@ -0,0 +1,10 @@ + + 24sp + 14sp + 18sp + 130dp + 10dp + 190dp + 180dp + 15dp + diff --git a/AffdexMe/app/src/main/res/values-normal/dimens.xml b/AffdexMe/app/src/main/res/values-normal/dimens.xml new file mode 100644 index 0000000..409136a --- /dev/null +++ b/AffdexMe/app/src/main/res/values-normal/dimens.xml @@ -0,0 +1,10 @@ + + 14sp + 11sp + 13sp + 80dp + 5dp + 130dp + 120dp + 10dp + diff --git a/AffdexMe/app/src/main/res/values-small/dimens.xml b/AffdexMe/app/src/main/res/values-small/dimens.xml new file mode 100644 index 0000000..1651c62 --- /dev/null +++ b/AffdexMe/app/src/main/res/values-small/dimens.xml @@ -0,0 +1,10 @@ + + 12sp + 9sp + 12sp + 70dp + 5dp + 130dp + 100dp + 8dp + diff --git a/AffdexMe/app/src/main/res/values-xlarge/dimens.xml b/AffdexMe/app/src/main/res/values-xlarge/dimens.xml new file mode 100644 index 0000000..f1b3f07 --- /dev/null +++ b/AffdexMe/app/src/main/res/values-xlarge/dimens.xml @@ -0,0 +1,11 @@ + + + 28sp + 18sp + 23sp + 140dp + 15dp + 220dp + 250dp + 25dp + \ No newline at end of file diff --git a/AffdexMe/app/src/main/res/values/attrs.xml b/AffdexMe/app/src/main/res/values/attrs.xml new file mode 100644 index 0000000..803712e --- /dev/null +++ b/AffdexMe/app/src/main/res/values/attrs.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/AffdexMe/app/src/main/res/values/colors.xml b/AffdexMe/app/src/main/res/values/colors.xml new file mode 100644 index 0000000..231fb0c --- /dev/null +++ b/AffdexMe/app/src/main/res/values/colors.xml @@ -0,0 +1,7 @@ + + + #55ffffff + #514a40 + #ff8000 + #000000 + \ No newline at end of file diff --git a/AffdexMe/app/src/main/res/values/dimens.xml b/AffdexMe/app/src/main/res/values/dimens.xml new file mode 100644 index 0000000..d95eff9 --- /dev/null +++ b/AffdexMe/app/src/main/res/values/dimens.xml @@ -0,0 +1,10 @@ + + 14sp + 11sp + 13sp + 80dp + 5dp + 140dp + 120dp + 10dp + diff --git a/AffdexMe/app/src/main/res/values/metricStyleName.xml b/AffdexMe/app/src/main/res/values/metricStyleName.xml new file mode 100644 index 0000000..9be4d73 --- /dev/null +++ b/AffdexMe/app/src/main/res/values/metricStyleName.xml @@ -0,0 +1,15 @@ + + + + \ No newline at end of file diff --git a/AffdexMe/app/src/main/res/values/metricStylePct.xml b/AffdexMe/app/src/main/res/values/metricStylePct.xml new file mode 100644 index 0000000..1d43135 
--- /dev/null +++ b/AffdexMe/app/src/main/res/values/metricStylePct.xml @@ -0,0 +1,11 @@ + + + + \ No newline at end of file diff --git a/AffdexMe/app/src/main/res/values/optionsStyle.xml b/AffdexMe/app/src/main/res/values/optionsStyle.xml new file mode 100644 index 0000000..d359cd0 --- /dev/null +++ b/AffdexMe/app/src/main/res/values/optionsStyle.xml @@ -0,0 +1,10 @@ + + + + \ No newline at end of file diff --git a/AffdexMe/app/src/main/res/values/strings.xml b/AffdexMe/app/src/main/res/values/strings.xml new file mode 100644 index 0000000..41f773b --- /dev/null +++ b/AffdexMe/app/src/main/res/values/strings.xml @@ -0,0 +1,18 @@ + + AffdexMe + + SMILE + BROW RAISE + BROW FURROW + VALENCE + ENGAGEMENT + FROWN + FPS: + + Show FPS + Show Tracking + Processed Frames Per Second: + + Sorry, AffdexMe requires the use of a front-facing camera, which was not found on your device. + + diff --git a/AffdexMe/app/src/main/res/values/styles.xml b/AffdexMe/app/src/main/res/values/styles.xml new file mode 100644 index 0000000..7cc1e27 --- /dev/null +++ b/AffdexMe/app/src/main/res/values/styles.xml @@ -0,0 +1,6 @@ + + + + diff --git a/AffdexMe/build.gradle b/AffdexMe/build.gradle new file mode 100644 index 0000000..9405f3f --- /dev/null +++ b/AffdexMe/build.gradle @@ -0,0 +1,19 @@ +// Top-level build file where you can add configuration options common to all sub-projects/modules. + +buildscript { + repositories { + jcenter() + } + dependencies { + classpath 'com.android.tools.build:gradle:1.2.3' + + // NOTE: Do not place your application dependencies here; they belong + // in the individual module build.gradle files + } +} + +allprojects { + repositories { + jcenter() + } +} diff --git a/AffdexMe/gradle.properties b/AffdexMe/gradle.properties new file mode 100644 index 0000000..1d3591c --- /dev/null +++ b/AffdexMe/gradle.properties @@ -0,0 +1,18 @@ +# Project-wide Gradle settings. + +# IDE (e.g. Android Studio) users: +# Gradle settings configured through the IDE *will override* +# any settings specified in this file. + +# For more details on how to configure your build environment visit +# http://www.gradle.org/docs/current/userguide/build_environment.html + +# Specifies the JVM arguments used for the daemon process. +# The setting is particularly useful for tweaking memory settings. +# Default value: -Xmx10248m -XX:MaxPermSize=256m +# org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8 + +# When configured, Gradle will run in incubating parallel mode. +# This option should only be used with decoupled projects. 
More details, visit +# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects +# org.gradle.parallel=true \ No newline at end of file diff --git a/AffdexMe/gradle/wrapper/gradle-wrapper.jar b/AffdexMe/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000..8c0fb64 Binary files /dev/null and b/AffdexMe/gradle/wrapper/gradle-wrapper.jar differ diff --git a/AffdexMe/gradle/wrapper/gradle-wrapper.properties b/AffdexMe/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000..0c71e76 --- /dev/null +++ b/AffdexMe/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Wed Apr 10 15:27:10 PDT 2013 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-2.2.1-all.zip diff --git a/AffdexMe/gradlew b/AffdexMe/gradlew new file mode 100644 index 0000000..91a7e26 --- /dev/null +++ b/AffdexMe/gradlew @@ -0,0 +1,164 @@ +#!/usr/bin/env bash + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS="" + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn ( ) { + echo "$*" +} + +die ( ) { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; +esac + +# For Cygwin, ensure paths are in UNIX format before anything is touched. +if $cygwin ; then + [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"` +fi + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >&- +APP_HOME="`pwd -P`" +cd "$SAVED" >&- + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? 
-ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin, switch paths to Windows format before running java +if $cygwin ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=$((i+1)) + done + case $i in + (0) set -- ;; + (1) set -- "$args0" ;; + (2) set -- "$args0" "$args1" ;; + (3) set -- "$args0" "$args1" "$args2" ;; + (4) set -- "$args0" "$args1" "$args2" "$args3" ;; + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules +function splitJvmOpts() { + JVM_OPTS=("$@") +} +eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS +JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" + +exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" diff --git a/AffdexMe/gradlew.bat b/AffdexMe/gradlew.bat new file mode 100644 index 0000000..8a0b282 --- /dev/null +++ b/AffdexMe/gradlew.bat @@ -0,0 +1,90 @@ +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS= + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. 
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto init
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:init
+@rem Get command-line arguments, handling Windowz variants
+
+if not "%OS%" == "Windows_NT" goto win9xME_args
+if "%@eval[2+2]" == "4" goto 4NT_args
+
+:win9xME_args
+@rem Slurp the command line arguments.
+set CMD_LINE_ARGS=
+set _SKIP=2
+
+:win9xME_args_slurp
+if "x%~1" == "x" goto execute
+
+set CMD_LINE_ARGS=%*
+goto execute
+
+:4NT_args
+@rem Get arguments from the 4NT Shell from JP Software
+set CMD_LINE_ARGS=%$
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/AffdexMe/settings.gradle b/AffdexMe/settings.gradle
new file mode 100644
index 0000000..e7b4def
--- /dev/null
+++ b/AffdexMe/settings.gradle
@@ -0,0 +1 @@
+include ':app'
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..430e17a
--- /dev/null
+++ b/README.md
@@ -0,0 +1,28 @@
+# Sample App for Affdex SDK for Android
+
+Welcome to our repository on GitHub! Here you will find example code to get you started with our Affdex SDK for Android and begin emotion-enabling your own app!
+
+AffdexMe
+--------
+
+*Dependencies*
+
+- Affectiva Android SDK (visit http://www.affectiva.com/solutions/apis-sdks/)
+
+**AffdexMe** is an app that demonstrates the use of the Affectiva Android SDK. It uses the front-facing camera on your Android device to view, process and analyze live video of your face. Start the app and you will see your own face on the screen and metrics describing your expressions. Tapping the screen will bring up a menu with options to display the Processed Frames Per Second metric, display facial tracking points, and control the rate at which frames are processed by the SDK.
+
+Most of the methods in MainActivity.java control the application's UI. Therefore, if you are just interested in learning how the Affectiva SDK works, you will find the calls relevant to the use of the SDK in the startCamera() and stopCamera() methods, as well as the onImageResults() method.
+
+In order to use this project, you will need to:
+- Obtain the Affectiva Android SDK
+- Copy the contents of the SDK's assets folder into this project's assets folder
+- Copy the contents of the SDK's libs folder into this project's libs folder
+- Copy the armeabi-v7a folder (found in the SDK libs folder) into this project's jniLibs folder
+- Add your license file to the /assets/Affdex folder and uncomment the line in the startCamera() method which specifies your license file path
+- Build the project
+- Run the app on an Android device with a front-facing camera
+
+Copyright (c) 2014 Affectiva. All rights reserved.
+
+
+See the comment section at the top of the MainActivity.java file for more information.
\ No newline at end of file
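
For quick reference, below is a condensed sketch of the SDK calls that MainActivity in this diff spreads across startCamera(), onImageResults(), and stopCamera(). It is only an illustrative fragment, not a complete class: it assumes an Activity that implements Detector.ImageListener and declares a CameraDetector field named detector and a SurfaceView named cameraView (both exist in MainActivity), and "YourLicenseFile" is the same placeholder name used in the diff.

```java
import java.util.List;

import android.util.Log;

import com.affectiva.android.affdex.sdk.Frame;
import com.affectiva.android.affdex.sdk.detector.CameraDetector;
import com.affectiva.android.affdex.sdk.detector.Detector;
import com.affectiva.android.affdex.sdk.detector.Face;

// Inside an Activity that implements Detector.ImageListener and declares
// `CameraDetector detector` and `SurfaceView cameraView`, as MainActivity does.
void startCamera() {
    // The SDK drives the front camera and paints its preview onto cameraView.
    detector = new CameraDetector(this, CameraDetector.CameraType.CAMERA_FRONT, cameraView);

    //detector.setLicensePath("YourLicenseFile"); // your license file in /assets/Affdex/

    detector.setDetectSmile(true);   // enable only the classifiers you need
    detector.setDetectValence(true);
    detector.setMaxProcessRate(20);  // cap the number of frames processed per second

    detector.setImageListener(this); // deliver onImageResults() callbacks to this Activity
    try {
        detector.start();
    } catch (Exception e) {
        Log.e("AffdexMe", e.getMessage());
    }
}

@Override
public void onImageResults(List<Face> faces, Frame image, float timeStamp) {
    if (faces == null || faces.size() == 0) return; // unprocessed frame, or no face in view
    Face face = faces.get(0);                       // the SDK tracks a single face at a time
    float smile = face.getSmileScore();
    float valence = face.getValenceScore();
    // ...update the UI with the scores...
}
```

As in the app itself, stop and discard the detector in onPause() (detector.stop(); detector = null;) so the camera is released and recreated the next time the activity regains focus.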