Updated project to reflect new capabilities of 1.2 SDK release

This commit is contained in:
acasallas 2015-07-13 14:17:57 -04:00
parent 5c3be3a9dd
commit 734b05c326
10 changed files with 234 additions and 173 deletions

View file

@ -1,5 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="EntryPointsManager">
<entry_points version="2.0" />
</component>
<component name="ProjectLevelVcsManager" settingsEditedManually="false">
<OptionsSetting value="true" id="Add" />
<OptionsSetting value="true" id="Remove" />

View file

@ -69,8 +69,6 @@
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/dependency-cache" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/dex" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/dex-cache" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/exploded-aar/com.android.support/appcompat-v7/22.1.1/jars" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/exploded-aar/com.android.support/support-v4/22.1.1/jars" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/incremental" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/jacoco" />
<excludeFolder url="file://$MODULE_DIR$/build/intermediates/javaResources" />
@ -88,14 +86,11 @@
</content>
<orderEntry type="jdk" jdkName="Android API 22 Platform" jdkType="Android SDK" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" exported="" name="appcompat-v7-22.1.1" level="project" />
<orderEntry type="library" exported="" name="affdex-sdk-1.1-SNAPSHOT" level="project" />
<orderEntry type="library" exported="" name="dagger-1.2.2" level="project" />
<orderEntry type="library" exported="" name="Affdex-sdk" level="project" />
<orderEntry type="library" exported="" name="javax.inject-1" level="project" />
<orderEntry type="library" exported="" name="support-v4-22.1.1" level="project" />
<orderEntry type="library" exported="" name="support-annotations-22.1.1" level="project" />
<orderEntry type="library" exported="" name="affdex-sdk-1.1-SNAPSHOT-javadoc" level="project" />
<orderEntry type="library" exported="" name="gson-2.3" level="project" />
<orderEntry type="library" exported="" name="Affdex-sdk-javadoc" level="project" />
<orderEntry type="library" exported="" name="flurry-analytics-4.1.0" level="project" />
</component>
</module>

View file

@ -8,12 +8,12 @@ android {
applicationId "com.affectiva.affdexme"
minSdkVersion 16
targetSdkVersion 22
versionCode 11
versionName "1.0.844"
versionCode 14
versionName "1.0.14b"
}
buildTypes {
release {
minifyEnabled false
minifyEnabled true
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
@ -24,8 +24,8 @@ dependencies {
compile 'com.google.code.gson:gson:2.3'
//include the Affdex SDK jars
compile files('libs/Affdex-sdk.jar')
compile files('libs/Affdex-sdk-javadoc.jar')
compile files('libs/Affdex-sdk-1.2-SNAPSHOT.jar')
compile files('libs/Affdex-sdk-1.2-SNAPSHOT-javadoc.jar')
compile files('libs/dagger-1.2.2.jar')
compile files('libs/flurry-analytics-4.1.0.jar')
compile files('libs/javax.inject-1.jar')

View file

@ -14,14 +14,28 @@
#keep all classes (otherwise Proguard may remove classes that use reflection, injection, Gson, etc...)
-keep class sun.**
-keep class com.**
-keepclassmembers class sun.** {*;}
-keep class android.**
-keepclassmembers class android.** {*;}
-keep class dagger.**
-keepclassmembers class dagger.** {*;}
-keep class javax.**
-keepclassmembers class javax.** {*;}
#keep certain class members (otherwise Proguard would strip the members of these classes)
-keepclassmembers class com.affectiva.android.affdex.sdk.detector.License { *; }
-keep class com.**
-keepclassmembers class com.affectiva.android.affdex.sdk.detector.A* { *; }
-keepclassmembers class com.affectiva.android.affdex.sdk.detector.B* { *; }
-keepclassmembers class com.affectiva.android.affdex.sdk.detector.I* { *; }
-keepclassmembers class com.affectiva.android.affdex.sdk.detector.L* { *; }
-keepclassmembers class com.affectiva.android.affdex.sdk.Frame { *; }
-keepclassmembers class com.affectiva.affdexme.DrawingView {*;}
-keepclassmembers class com.affectiva.affdexme.MetricView {*;}
-keepclassmembers class com.affectiva.affdexme.GradientMetricView {*;}
-keepclassmembers class * {
@javax.inject.* *;
@dagger.* *;

View file

@ -20,7 +20,7 @@
android:theme="@style/AppTheme">
<activity
android:name=".MainActivity"
android:configChanges="keyboardHidden|screenSize|orientation"
android:configChanges="screenSize|keyboardHidden|orientation"
android:screenOrientation="sensorPortrait"
android:windowSoftInputMode="stateHidden"
android:label="@string/app_name" >

View file

@ -28,15 +28,10 @@ public class DrawingView extends SurfaceView implements SurfaceHolder.Callback {
private Paint boxPaint;
private boolean stopFlag = false; //boolean to indicate when thread has been told to stop
private PointF[] nextPointsToDraw = null; //holds a reference to the most recent set of points returned by CameraDetector, passed in by main thread
boolean isDrawPointsEnabled = false; //saves whether user has selected dots to be drawn
float imageWidth = 0;
float imageHeight = 0;
float screenToImageRatio = 0;
float drawThickness = 0; //thickness with which dots and square will be drawn
private DrawingViewConfig config;
private final long drawPeriod = 33; //draw at 30 fps
public DrawingThread(SurfaceHolder surfaceHolder, boolean drawPoints) {
public DrawingThread(SurfaceHolder surfaceHolder, DrawingViewConfig con) {
mSurfaceHolder = surfaceHolder;
circlePaint = new Paint();
@ -46,7 +41,10 @@ public class DrawingView extends SurfaceView implements SurfaceHolder.Callback {
boxPaint.setColor(Color.WHITE);
boxPaint.setStyle(Paint.Style.STROKE);
isDrawPointsEnabled = drawPoints;
config = con;
setThickness(config.drawThickness);
}
/**
@ -58,7 +56,7 @@ public class DrawingView extends SurfaceView implements SurfaceHolder.Callback {
boxPaint.setColor(Color.rgb((int)colorScore,255,(int)colorScore));
} else {
float colorScore = ((100f+s)/100f)*255;
boxPaint.setColor(Color.rgb(255,(int)colorScore,(int)colorScore));
boxPaint.setColor(Color.rgb(255, (int) colorScore, (int) colorScore));
}
}
@ -75,23 +73,10 @@ public class DrawingView extends SurfaceView implements SurfaceHolder.Callback {
nextPointsToDraw = pointList;
}
//Sets measurements thread will use to draw facial tracking dots.
public void setDimen(int w, int h, float appToImageRatio, float thickness) {
imageWidth = w;
imageHeight = h;
screenToImageRatio = appToImageRatio;
drawThickness = thickness;
void setThickness(int thickness) {
boxPaint.setStrokeWidth(thickness);
}
private void setDrawPointsEnabled(boolean b) {
isDrawPointsEnabled = b;
}
private boolean getDrawPointsEnabled() {
return isDrawPointsEnabled;
}
//Inform thread face detection has stopped, so array of points is no longer valid.
public void invalidatePoints() {
nextPointsToDraw = null;
@ -115,7 +100,7 @@ public class DrawingView extends SurfaceView implements SurfaceHolder.Callback {
if (c!= null) {
synchronized (mSurfaceHolder) {
c.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR); //clear previous dots
if (isDrawPointsEnabled && (nextPointsToDraw != null) ) {
if (config.isDrawPointsEnabled && (nextPointsToDraw != null) ) {
draw(c);
}
}
@ -137,6 +122,8 @@ public class DrawingView extends SurfaceView implements SurfaceHolder.Callback {
Log.e(LOG_TAG,ex.getMessage());
}
}
config = null; //nullify object to avoid memory leak
}
void draw(Canvas c) {
@ -144,9 +131,9 @@ public class DrawingView extends SurfaceView implements SurfaceHolder.Callback {
PointF[] points = nextPointsToDraw;
//Coordinates around which to draw bounding box.
float leftBx = imageWidth;
float leftBx = config.imageWidth;
float rightBx = 0;
float topBx = imageHeight;
float topBx = config.imageHeight;
float botBx = 0;
//Iterate through all the points given to us by the CameraDetector object
@ -164,21 +151,70 @@ public class DrawingView extends SurfaceView implements SurfaceHolder.Callback {
//Draw facial tracking dots.
//The camera preview is displayed as a mirror, so X pts have to be reversed
c.drawCircle((imageWidth - points[i].x - 1) * screenToImageRatio, (points[i].y)* screenToImageRatio, drawThickness, circlePaint);
c.drawCircle((config.imageWidth - points[i].x - 1) * config.screenToImageRatio, (points[i].y)* config.screenToImageRatio, config.drawThickness, circlePaint);
}
//Draw the bounding box.
c.drawRect((imageWidth - leftBx - 1) * screenToImageRatio, topBx * screenToImageRatio, (imageWidth - rightBx - 1) * screenToImageRatio, botBx * screenToImageRatio, boxPaint);
c.drawRect((config.imageWidth - leftBx - 1) * config.screenToImageRatio, topBx * config.screenToImageRatio, (config.imageWidth - rightBx - 1) * config.screenToImageRatio, botBx * config.screenToImageRatio, boxPaint);
}
}
/**
 * Container for the measurements the drawing thread uses to render the facial
 * tracking dots and the bounding box: the camera frame size, the SurfaceView
 * size, the ratio between the two, and the stroke thickness.
 *
 * The screen-to-image ratio can only be computed once BOTH the image and the
 * SurfaceView dimensions are known, so each updater recomputes it only when
 * the other pair of dimensions has already been supplied.
 */
class DrawingViewConfig {
    private int imageHeight = 1;
    private int imageWidth = 1;
    private int surfaceViewWidth = 0;
    private int surfaceViewHeight = 0;
    private float screenToImageRatio = 0;
    private int drawThickness = 0;
    private boolean isImageDimensionsNeeded = true;
    private boolean isSurfaceViewDimensionsNeeded = true;
    private boolean isDrawPointsEnabled = true; //by default, have the drawing thread draw tracking dots

    /**
     * Records the size of the frames coming from the camera.
     *
     * @throws IllegalArgumentException if either dimension is not positive
     */
    public void updateImageDimensions(int w, int h) {
        if (w < 1 || h < 1) {
            throw new IllegalArgumentException("Image Dimensions must be positive.");
        }
        imageWidth = w;
        imageHeight = h;
        if (!isSurfaceViewDimensionsNeeded) {
            //SurfaceView size is already known, so the ratio can be (re)computed now.
            screenToImageRatio = surfaceViewWidth / (float) imageWidth;
        }
        isImageDimensionsNeeded = false;
    }

    /**
     * Records the size of the SurfaceView the tracking dots are drawn over.
     *
     * @throws IllegalArgumentException if either dimension is not positive
     */
    public void updateSurfaceViewDimensions(int w, int h) {
        if (w < 1 || h < 1) {
            throw new IllegalArgumentException("SurfaceView Dimensions must be positive.");
        }
        surfaceViewWidth = w;
        surfaceViewHeight = h;
        if (!isImageDimensionsNeeded) {
            //Image size is already known, so the ratio can be (re)computed now.
            screenToImageRatio = surfaceViewWidth / (float) imageWidth;
        }
        isSurfaceViewDimensionsNeeded = false;
    }

    /**
     * Sets the stroke width used for the dots and the bounding box.
     *
     * @throws IllegalArgumentException if the thickness is not positive
     */
    public void setDrawThickness(int t) {
        if (t < 1) {
            throw new IllegalArgumentException("Thickness must be positive.");
        }
        drawThickness = t;
    }
}
//Class variables of DrawingView class
private SurfaceHolder surfaceHolder;
private DrawingThread drawingThread; //DrawingThread object
private boolean isDimensionsNeeded = true;
private boolean isDrawPointsEnabled = true; //by default, start drawing thread without drawing points
private DrawingViewConfig drawingViewConfig;
private static String LOG_TAG = "AffdexMe";
//three constructors required of any custom view
public DrawingView(Context context) {
super(context);
@ -197,13 +233,14 @@ public class DrawingView extends SurfaceView implements SurfaceHolder.Callback {
surfaceHolder = getHolder(); //The SurfaceHolder object will be used by the thread to request canvas to draw on SurfaceView
surfaceHolder.setFormat(PixelFormat.TRANSPARENT); //set to Transparent so this surfaceView does not obscure the one it is overlaying (the one displaying the camera).
surfaceHolder.addCallback(this); //become a Listener to the three events below that SurfaceView throws
drawingThread = new DrawingThread(surfaceHolder, isDrawPointsEnabled);
drawingViewConfig = new DrawingViewConfig();
drawingThread = new DrawingThread(surfaceHolder, drawingViewConfig);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
if (drawingThread.isStopped()) {
drawingThread = new DrawingThread(surfaceHolder, isDrawPointsEnabled);
drawingThread = new DrawingThread(surfaceHolder, drawingViewConfig);
}
drawingThread.start();
}
@ -225,29 +262,51 @@ public class DrawingView extends SurfaceView implements SurfaceHolder.Callback {
Log.e(LOG_TAG,e.getMessage());
}
}
isDimensionsNeeded = true; //Now that thread has been destroyed, we'll need dimensions to be recalculated if a new thread is later created.
}
public void setDimensions(int width, int height, float appToImageRatio, float radius) {
drawingThread.setDimen(width, height, appToImageRatio, radius);
isDimensionsNeeded = false;
public boolean isImageDimensionsNeeded() {
return drawingViewConfig.isImageDimensionsNeeded;
}
public boolean isDimensionsNeeded() {
return isDimensionsNeeded;
public boolean isSurfaceDimensionsNeeded() {
return drawingViewConfig.isSurfaceViewDimensionsNeeded;
}
//Flags that the camera frame dimensions must be supplied again (clears the "known" state in the config).
public void invalidateImageDimensions() {
drawingViewConfig.isImageDimensionsNeeded = true;
}
//Passes the camera frame dimensions through to the shared config object.
//Invalid (non-positive) dimensions are logged rather than crashing the app.
public void updateImageDimensions(int w, int h) {
try {
drawingViewConfig.updateImageDimensions(w, h);
} catch (Exception e) {
Log.e(LOG_TAG,e.getMessage());
}
}
//Passes the SurfaceView dimensions through to the shared config object.
//Invalid (non-positive) dimensions are logged rather than crashing the app.
public void updateSurfaceViewDimensions(int w, int h) {
try {
drawingViewConfig.updateSurfaceViewDimensions(w, h);
} catch (Exception e) {
Log.e(LOG_TAG,e.getMessage());
}
}
//Sets the thickness (in pixels) used for the tracking dots and the bounding box.
//Both the config update and the thread update are inside the try so that an invalid
//(non-positive) thickness is logged instead of throwing, matching the error handling
//of updateImageDimensions()/updateSurfaceViewDimensions(). Previously the
//setDrawThickness() call sat outside the try and could crash on bad input.
public void setThickness(int t) {
try {
drawingViewConfig.setDrawThickness(t);
drawingThread.setThickness(t);
} catch(Exception e) {
Log.e(LOG_TAG,e.getMessage());
}
}
public void setDrawPointsEnabled(boolean b){
isDrawPointsEnabled = b;
drawingThread.setDrawPointsEnabled(b);
}
public void invalidateDimensions() {
isDimensionsNeeded = true;
drawingViewConfig.isDrawPointsEnabled = b;
}
public boolean getDrawPointsEnabled() {
return isDrawPointsEnabled;
return drawingViewConfig.isDrawPointsEnabled;
}
//The methods below simply delegate to the drawingThread object
@ -266,6 +325,4 @@ public class DrawingView extends SurfaceView implements SurfaceHolder.Callback {
}

View file

@ -44,11 +44,14 @@ import com.affectiva.android.affdex.sdk.detector.Face;
* display facial tracking points, and control the rate at which frames are processed by the SDK.
*
* Most of the methods in this file control the application's UI. Therefore, if you are just interested in learning how the Affectiva SDK works,
* you will find the calls relevant to the use of the SDK in the startCamera() and stopCamera() methods, as well as the onImageResults() method.
* you will find the calls relevant to the use of the SDK in the initializeCameraDetector(), startCamera(), stopCamera(),
* and onImageResults() methods.
*
* This class implements the Detector.ImageListener interface, allowing it to receive the onImageResults() event.
* This class implements the Detector.FaceListener interface, allowing it to receive the onFaceDetectionStarted() and
* onFaceDetectionStopped() events.
* This class implements the CameraDetector.CameraSurfaceViewListener interface, allowing it to receive
* onSurfaceViewAspectRatioChanged() events.
*
* In order to use this project, you will need to:
* - Obtain the SDK from Affectiva (visit http://www.affdex.com/mobile-sdk)
@ -64,10 +67,9 @@ import com.affectiva.android.affdex.sdk.detector.Face;
*/
public class MainActivity extends Activity
implements Detector.FaceListener, Detector.ImageListener, TextView.OnEditorActionListener, View.OnTouchListener{
private static final String LOG_TAG = "AffdexMe";
implements Detector.FaceListener, Detector.ImageListener, TextView.OnEditorActionListener, View.OnTouchListener, CameraDetector.CameraSurfaceViewListener {
private static final String LOG_TAG = "Affectiva";
//Affectiva SDK Object
private CameraDetector detector = null;
@ -138,6 +140,8 @@ public class MainActivity extends Activity
}
initializeUI();
initializeCameraDetector();
}
void initializeUI() {
@ -209,7 +213,7 @@ public class MainActivity extends Activity
menuLayout.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent event) {
/**
/*
* This method effectively blocks the mainLayout from receiving a touch event
* when the menu is pressed. This is to prevent the menu from closing if the user accidentally touches it
* when aiming for a checkbox or edit box.
@ -219,7 +223,7 @@ public class MainActivity extends Activity
});
fpsEditText.setOnEditorActionListener(this);
/**
/*
* This app sets the View.SYSTEM_UI_FLAG_HIDE_NAVIGATION flag. Unfortunately, this flag causes
* Android to steal the first touch event after the navigation bar has been hidden, a touch event
* which should be used to make our menu visible again. Therefore, we attach a listener to be notified
@ -237,21 +241,44 @@ public class MainActivity extends Activity
});
}
/**
* We use onResume() to restore application settings using the SharedPreferences object and
* to indicate that dimensions should be recalculated.
/*
 * Creates and configures the CameraDetector object that drives the Affdex SDK.
 * Invoked once during Activity setup (right after initializeUI()); the detector
 * itself is started and stopped later in startCamera()/stopCamera().
 */
void initializeCameraDetector() {
/* Put the SDK in camera mode by using this constructor. The SDK will be in control of
* the camera. If a SurfaceView is passed in as the last argument to the constructor,
* that view will be painted with what the camera sees.
*/
detector = new CameraDetector(this, CameraDetector.CameraType.CAMERA_FRONT, cameraView);
// NOTE: uncomment the line below and replace "YourLicenseFile" with your license file, which should be stored in /assets/Affdex/
//detector.setLicensePath("YourLicenseFile");
// We want to detect all expressions, so turn on all classifiers.
detector.setDetectSmile(true);
detector.setDetectBrowFurrow(true);
detector.setDetectBrowRaise(true);
detector.setDetectEngagement(true);
detector.setDetectValence(true);
detector.setDetectLipCornerDepressor(true);
//Cap the number of frames per second the SDK processes (presumably the user-adjustable rate — see fpsEditText).
detector.setMaxProcessRate(detectorProcessRate);
//Register this Activity for onImageResults(), onFaceDetectionStarted()/Stopped(),
//and onSurfaceViewAspectRatioChanged() callbacks.
detector.setImageListener(this);
detector.setFaceListener(this);
detector.setCameraDetectorDimensionsListener(this);
}
/*
* We use onResume() to restore application settings using the SharedPreferences object
*/
@Override
public void onResume() {
super.onResume();
restoreApplicationSettings();
drawingView.invalidateDimensions(); //set flag to have screen dimensions resized (usage appears in onImageResults())
setMenuVisible(false); //always make the menu invisible by default
}
/**
/*
* We use the Shared Preferences object to restore application settings.
* **/
*/
public void restoreApplicationSettings() {
sharedPreferences = getSharedPreferences(PREFS_NAME, 0);
@ -287,57 +314,40 @@ public class MainActivity extends Activity
/**
* We start the camera as soon as the application has been given focus, which occurs as soon as the application has
* been opened or reopened. Although this can also occur when the application regains focus after a dialog box has been closed,
* the camera will not be reinitialized because the detector object will not have been set to null during onPause().
* the startCamera() method will not start the camera if it is already running.
* We also reset variables used to calculate the Processed Frames Per Second.
*/
@Override
public void onWindowFocusChanged(boolean hasFocus) {
if (hasFocus && isFrontFacingCameraDetected) {
startCamera();
if (!drawingView.isSurfaceDimensionsNeeded()) {
progressBarLayout.setVisibility(View.GONE);
}
resetFPSCalculations();
}
}
void startCamera() {
if (detector == null) {
/** Put the SDK in camera mode by using this constructor. The SDK will be in control of
* the camera. If a SurfaceView is passed in as the last argument to the constructor,
* that view will be painted with what the camera sees.
*/
detector = new CameraDetector(this, CameraDetector.CameraType.CAMERA_FRONT, cameraView);
// NOTE: uncomment the line below and replace "YourLicenseFile" with your license file, which should be stored in /assets/Affdex/
//detector.setLicensePath("YourLicenseFile");
// We want to detect all expressions, so turn on all classifiers.
detector.setDetectSmile(true);
detector.setDetectBrowFurrow(true);
detector.setDetectBrowRaise(true);
detector.setDetectEngagement(true);
detector.setDetectValence(true);
detector.setDetectLipCornerDepressor(true);
detector.setMaxProcessRate(detectorProcessRate);
detector.setImageListener(this);
detector.setFaceListener(this);
//now that the CameraDetector object has been set up, start the camera
if (!detector.isRunning()) {
try {
detector.start();
} catch (Exception e) {
Log.e(LOG_TAG, e.getMessage());
}
}
}
}
@Override
public void onFaceDetectionStarted() {
leftMetricsLayout.animate().alpha(1); //make left and right metrics appear
rightMetricsLayout.animate().alpha(1);
resetFPSCalculations(); //Since the FPS may be different whether a face is being tracked or not, reset variables.
}
@Override
@ -352,18 +362,17 @@ public class MainActivity extends Activity
resetFPSCalculations(); //Since the FPS may be different whether a face is being tracked or not, reset variables.
}
/**
* This event is received every time the SDK processes a frame.
*/
@Override
public void onImageResults(List<Face> faces, Frame image, float timeStamp) {
/**
* If the flag indicating that we need to size our layout is set, call calculateDimensions().
* The flag is a boolean stored in our drawingView object, retrieved through DrawingView.isDimensionsNeeded().
* If the flag indicating that we still need to know the size of the camera frames is set, call calculateImageDimensions().
* The flag is a boolean stored in our drawingView object, retrieved through DrawingView.isImageDimensionsNeeded().
*/
if (drawingView.isDimensionsNeeded() ) {
calculateDimensions(image);
if (drawingView.isImageDimensionsNeeded() ) {
calculateImageDimensions(image);
}
//If the faces object is null, we received an unprocessed frame
@ -404,70 +413,50 @@ public class MainActivity extends Activity
}
/**
* This method serves two purposes:
* -It informs the drawing thread of the size of the frames passed by the CameraDetector object.
* -It corrects the dimensions of our mainLayout object to conform to the aspect ratio of the frames passed by the CameraDetector object.
* In this method, we update our drawingView to contain the dimensions of the frames coming from the camera so that drawingView
* can correctly draw the tracking dots. We also call drawingView.setThickness(), which sets the size of the tracking dots and the
* thickness of the bounding box.
*/
void calculateDimensions(Frame image){
//Log.i(LOG_TAG,"Dimensions being re-calculated");
float screenWidth = activityLayout.getWidth();
float screenHeight = activityLayout.getHeight();
float referenceDimension = screenHeight; //referenceDimension will be used to determine the size of the facial tracking dots
void calculateImageDimensions(Frame image){
///referenceDimension will be used to determine the size of the facial tracking dots
float referenceDimension = activityLayout.getHeight();
//get size of frames being passed by camera
float imageWidth = image.getWidth();
float imageHeight = image.getHeight();
int imageWidth = image.getWidth();
int imageHeight = image.getHeight();
/**
* If device is rotated vertically, reverse the width and height returned by the Frame object,
* and switch the dimension we consider to be the reference dimension.
*/
if ((ROTATE.BY_90_CW == image.getTargetRotation()) || (ROTATE.BY_90_CCW == image.getTargetRotation())) {
float temp = imageWidth;
int temp = imageWidth;
imageWidth = imageHeight;
imageHeight = temp;
referenceDimension = screenWidth;
referenceDimension = activityLayout.getWidth();
}
/**
* In this section, we resize our layouts so that the SurfaceView displaying the camera images to will have the same
* aspect ratio as the frames we are receiving from the camera.
* Since all elements in our app are inside 'mainLayout', we just have to adjust the height and width of this layout.
*/
//calculate aspect ratios of camera frames and screen
float imageAspectRatio = imageWidth/imageHeight;
float screenAspectRatio = screenWidth/screenHeight;
float screenToImageRatio = 0;
int newLayoutHeight = 0;
int newLayoutWidth = 0;
if (screenAspectRatio < imageAspectRatio) {
newLayoutHeight = (int) (screenWidth / imageAspectRatio);
screenToImageRatio = screenWidth / imageWidth;
newLayoutWidth = (int)screenWidth;
} else {
newLayoutWidth = (int) (screenHeight * imageAspectRatio);
screenToImageRatio = screenHeight/imageHeight;
newLayoutHeight = (int)screenHeight;
}
drawingView.updateImageDimensions(imageWidth,imageHeight);
drawingView.setThickness((int)(referenceDimension/160f));
}
FrameLayout.LayoutParams params = (FrameLayout.LayoutParams) mainLayout.getLayoutParams();
params.height = newLayoutHeight;
params.width = newLayoutWidth;
/**
* This method is called when the SDK has corrected the aspect ratio of the SurfaceView. We use this information to resize
* our mainLayout ViewGroup so the UI fits snugly around the SurfaceView. We also update our drawingView object, so the tracking dots
* are drawn in the correct coordinates.
*/
@Override
public void onSurfaceViewAspectRatioChanged(int width, int height) {
drawingView.updateSurfaceViewDimensions(width,height);
RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mainLayout.getLayoutParams();
params.height = height;
params.width = width;
mainLayout.setLayoutParams(params);
/**
* Send necessary dimensions to the drawing thread.
* The dimensions are: width of frame, height of frame, ratio of screen to frame size, and thickness of facial tracking dots.
* This method will clear the flag that indicates whether we need to calculate dimensions, so this calculateDimensions()
* will not be continuously called.
*/
drawingView.setDimensions((int) imageWidth, (int) imageHeight, screenToImageRatio, referenceDimension / 160);
//Now that the aspect ratio has been corrected, remove the progress bar from obscuring the screen
//Now that our main layout has been resized, we can remove the progress bar that was obscuring the screen (its purpose was to obscure the resizing of the SurfaceView)
progressBarLayout.setVisibility(View.GONE);
}
@ -496,22 +485,18 @@ public class MainActivity extends Activity
@Override
public void onPause() {
super.onPause();
saveApplicationSettings();
progressBarLayout.setVisibility(View.VISIBLE);
saveApplicationSettings();
stopCamera();
}
private void stopCamera() {
performFaceDetectionStoppedTasks();
if (null != detector) {
try {
detector.stop();
} catch (Exception e) {
Log.e("AffdexMe", e.getMessage());
}
try {
detector.stop();
} catch (Exception e) {
Log.e(LOG_TAG, e.getMessage());
}
detector = null; //setting detector to null will allow startCamera() to recreate the detector object when the application is reopened.
}
/**

View file

@ -2,9 +2,7 @@
xmlns:tools="http://schemas.android.com/tools" android:layout_width="match_parent"
android:layout_height="match_parent" tools:context=".MainActivity" android:focusable="true"
android:focusableInTouchMode="true"
android:id="@+id/main_layout"
android:keepScreenOn="true"
android:layout_gravity="center"
>
<SurfaceView
android:layout_width="match_parent"
@ -12,18 +10,27 @@
android:layout_centerInParent="true"
android:id="@+id/camera_preview"
/>
<com.affectiva.affdexme.DrawingView
android:layout_width="match_parent"
<RelativeLayout
android:layout_height="match_parent"
android:layout_width="match_parent"
android:id="@+id/main_layout"
android:layout_centerInParent="true"
android:id="@+id/drawing_view"/>
<include layout="@layout/metric_layout"
android:id="@+id/metric_view_group"
/>
<include layout="@layout/menu_layout"
android:layout_below="@id/metric_view_group"
android:layout_width="fill_parent"
android:layout_height="wrap_content"/>
>
<com.affectiva.affdexme.DrawingView
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_centerInParent="true"
android:id="@+id/drawing_view"/>
<include layout="@layout/metric_layout"
android:id="@+id/metric_view_group"
/>
<include layout="@layout/menu_layout"
android:layout_below="@id/metric_view_group"
android:layout_width="fill_parent"
android:layout_height="wrap_content"/>
</RelativeLayout>
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="match_parent"

View file

@ -11,7 +11,7 @@ AffdexMe
**AffdexMe** is an app that demonstrates the use of the Affectiva Android SDK. It uses the front-facing camera on your Android device to view, process and analyze live video of your face. Start the app and you will see your own face on the screen and metrics describing your expressions. Tapping the screen will bring up a menu with options to display the Processed Frames Per Second metric, display facial tracking points, and control the rate at which frames are processed by the SDK.
Most of the methods in this file control the application's UI. Therefore, if you are just interested in learning how the Affectiva SDK works, you will find the calls relevant to the use of the SDK in the startCamera() and stopCamera() methods, as well as the onImageResults() method.
Most of the methods in this file control the application's UI. Therefore, if you are just interested in learning how the Affectiva SDK works, you will find the calls relevant to the use of the SDK in the initializeCameraDetector(), startCamera(), stopCamera(), and onImageResults() methods.
The AffdexMe folder is an Android Studio project.