Merge in nyc-release history after reset to nyc-dev
diff --git a/Android.mk b/Android.mk
index 6433b95..d3deaac 100644
--- a/Android.mk
+++ b/Android.mk
@@ -19,6 +19,7 @@
 LOCAL_MODULE_TAGS := optional
 
 LOCAL_SDK_VERSION := current
+LOCAL_MIN_SDK_VERSION := 21
 
 LOCAL_SRC_FILES := \
 	$(call all-java-files-under, src)
diff --git a/src/com/android/devcamera/Api2Camera.java b/src/com/android/devcamera/Api2Camera.java
index 1c61cb0..73e5c87 100644
--- a/src/com/android/devcamera/Api2Camera.java
+++ b/src/com/android/devcamera/Api2Camera.java
@@ -20,6 +20,7 @@
 import android.graphics.SurfaceTexture;
 import android.hardware.camera2.CameraAccessException;
 import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
 import android.hardware.camera2.CameraDevice;
 import android.hardware.camera2.CameraManager;
 import android.hardware.camera2.CameraMetadata;
@@ -155,13 +156,24 @@
         });
 
         // Set initial Noise and Edge modes.
-        if (mCameraInfoCache.IS_BULLHEAD || mCameraInfoCache.IS_ANGLER) {
+        if (mCameraInfoCache.isHardwareLevelAtLeast(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3)) {
             // YUV streams.
-            mCaptureNoiseIndex = 4 /*ZSL*/ % mCameraInfoCache.noiseModes.length;
-            mCaptureEdgeIndex = 3 /*ZSL*/ % mCameraInfoCache.edgeModes.length;
+            if (mCameraInfoCache.supportedModesContains(mCameraInfoCache.noiseModes,
+                    CameraCharacteristics.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG)) {
+                mCaptureNoiseMode = CameraCharacteristics.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
+            } else {
+                mCaptureNoiseMode = CameraCharacteristics.NOISE_REDUCTION_MODE_FAST;
+            }
+            if (mCameraInfoCache.supportedModesContains(mCameraInfoCache.edgeModes,
+                    CameraCharacteristics.EDGE_MODE_ZERO_SHUTTER_LAG)) {
+                mCaptureEdgeMode = CameraCharacteristics.EDGE_MODE_ZERO_SHUTTER_LAG;
+            } else {
+                mCaptureEdgeMode = CameraCharacteristics.EDGE_MODE_FAST;
+            }
+
             // Reprocessing.
-            mReprocessingNoiseIndex = 2 /*High Quality*/ % mCameraInfoCache.noiseModes.length;
-            mReprocessingEdgeIndex = 2 /*HIgh Quality*/ % mCameraInfoCache.edgeModes.length;
+            mReprocessingNoiseMode = CameraCharacteristics.NOISE_REDUCTION_MODE_HIGH_QUALITY;
+            mReprocessingEdgeMode = CameraCharacteristics.EDGE_MODE_HIGH_QUALITY;
         }
     }
 
@@ -279,7 +291,7 @@
     }
 
     public boolean isReprocessingAvailable() {
-        return mCameraInfoCache.reprocessingAvailable();
+        return mCameraInfoCache.isYuvReprocessingAvailable();
     }
 
     @Override
@@ -288,13 +300,20 @@
     }
 
     @Override
+    public float[] getFieldOfView() {
+        return mCameraInfoCache.getFieldOfView();
+    }
+
+    @Override
+    public int getOrientation() {
+        return mCameraInfoCache.sensorOrientation();
+    }
+
+    @Override
     public void openCamera() {
-        // If API2 FULL mode is not available, display toast, do nothing.
+        // If API2 FULL mode is not available, display toast
         if (!mCameraInfoCache.isCamera2FullModeAvailable()) {
             mMyCameraCallback.noCamera2Full();
-            if (!mCameraInfoCache.IS_NEXUS_6) {
-                return;
-            }
         }
 
         Log.v(TAG, "Opening camera " + mCameraInfoCache.getCameraId());
@@ -390,14 +409,14 @@
                     " x " + mCameraInfoCache.getRawStreamSize().getHeight());
         }
 
-        if (USE_REPROCESSING_IF_AVAIL && mCameraInfoCache.reprocessingAvailable()) {
+        if (USE_REPROCESSING_IF_AVAIL && mCameraInfoCache.isYuvReprocessingAvailable()) {
             outputSurfaces.add(mJpegImageReader.getSurface());
             Log.v(TAG, "  .. added JPEG ImageReader " + mCameraInfoCache.getJpegStreamSize().getWidth() +
                     " x " + mCameraInfoCache.getJpegStreamSize().getHeight());
         }
 
         try {
-            if (USE_REPROCESSING_IF_AVAIL && mCameraInfoCache.reprocessingAvailable()) {
+            if (USE_REPROCESSING_IF_AVAIL && mCameraInfoCache.isYuvReprocessingAvailable()) {
                 InputConfiguration inputConfig = new InputConfiguration(mCameraInfoCache.getYuvStream1Size().getWidth(),
                         mCameraInfoCache.getYuvStream1Size().getHeight(), ImageFormat.YUV_420_888);
                 mCameraDevice.createReprocessableCaptureSession(inputConfig, outputSurfaces,
@@ -441,28 +460,27 @@
     private boolean mCaptureYuv1 = false;
     private boolean mCaptureYuv2 = false;
     private boolean mCaptureRaw = false;
-    private int mCaptureNoiseIndex = CaptureRequest.NOISE_REDUCTION_MODE_OFF;
-    private int mCaptureEdgeIndex = CaptureRequest.EDGE_MODE_OFF;
+    private int mCaptureNoiseMode = CaptureRequest.NOISE_REDUCTION_MODE_FAST;
+    private int mCaptureEdgeMode = CaptureRequest.EDGE_MODE_FAST;
     private boolean mCaptureFace = false;
     // Variables to hold reprocessing state.
-    private int mReprocessingNoiseIndex = CaptureRequest.NOISE_REDUCTION_MODE_OFF;
-    private int mReprocessingEdgeIndex = CaptureRequest.EDGE_MODE_OFF;
-
+    private int mReprocessingNoiseMode = CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY;
+    private int mReprocessingEdgeMode = CaptureRequest.EDGE_MODE_HIGH_QUALITY;
 
     public void setCaptureFlow(Boolean yuv1, Boolean yuv2, Boolean raw10, Boolean nr, Boolean edge, Boolean face) {
         if (yuv1 != null) mCaptureYuv1 = yuv1;
         if (yuv2 != null) mCaptureYuv2 = yuv2;
         if (raw10 != null) mCaptureRaw = raw10 && RAW_STREAM_ENABLE;
         if (nr) {
-            mCaptureNoiseIndex = ++mCaptureNoiseIndex % mCameraInfoCache.noiseModes.length;
+            mCaptureNoiseMode = getNextMode(mCaptureNoiseMode, mCameraInfoCache.noiseModes);
         }
         if (edge) {
-            mCaptureEdgeIndex = ++mCaptureEdgeIndex % mCameraInfoCache.edgeModes.length;
+            mCaptureEdgeMode = getNextMode(mCaptureEdgeMode, mCameraInfoCache.edgeModes);
         }
         if (face != null) mCaptureFace = face;
         mMyCameraCallback.setNoiseEdgeText(
-                "NR " + noiseModeToString(mCameraInfoCache.noiseModes[mCaptureNoiseIndex]),
-                "Edge " + edgeModeToString(mCameraInfoCache.edgeModes[mCaptureEdgeIndex])
+                "NR " + noiseModeToString(mCaptureNoiseMode),
+                "Edge " + edgeModeToString(mCaptureEdgeMode)
         );
 
         if (mCurrentCaptureSession != null) {
@@ -472,14 +490,14 @@
 
     public void setReprocessingFlow(Boolean nr, Boolean edge) {
         if (nr) {
-            mReprocessingNoiseIndex = ++mReprocessingNoiseIndex % mCameraInfoCache.noiseModes.length;
+            mReprocessingNoiseMode = getNextMode(mReprocessingNoiseMode, mCameraInfoCache.noiseModes);
         }
         if (edge) {
-            mReprocessingEdgeIndex = ++mReprocessingEdgeIndex % mCameraInfoCache.edgeModes.length;
+            mReprocessingEdgeMode = getNextMode(mReprocessingEdgeMode, mCameraInfoCache.edgeModes);
         }
         mMyCameraCallback.setNoiseEdgeTextForReprocessing(
-                "NR " + noiseModeToString(mCameraInfoCache.noiseModes[mReprocessingNoiseIndex]),
-                "Edge " + edgeModeToString(mCameraInfoCache.edgeModes[mReprocessingEdgeIndex])
+                "NR " + noiseModeToString(mReprocessingNoiseMode),
+                "Edge " + edgeModeToString(mReprocessingEdgeMode)
         );
     }
 
@@ -496,11 +514,11 @@
                 b1.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
             }
 
-            b1.set(CaptureRequest.NOISE_REDUCTION_MODE, mCameraInfoCache.noiseModes[mCaptureNoiseIndex]);
-            b1.set(CaptureRequest.EDGE_MODE, mCameraInfoCache.edgeModes[mCaptureEdgeIndex]);
+            b1.set(CaptureRequest.NOISE_REDUCTION_MODE, mCaptureNoiseMode);
+            b1.set(CaptureRequest.EDGE_MODE, mCaptureEdgeMode);
             b1.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, mCaptureFace ? mCameraInfoCache.bestFaceDetectionMode() : CaptureRequest.STATISTICS_FACE_DETECT_MODE_OFF);
 
-            Log.v(TAG, "  .. NR=" + mCaptureNoiseIndex + "  Edge=" + mCaptureEdgeIndex + "  Face=" + mCaptureFace);
+            Log.v(TAG, "  .. NR=" + mCaptureNoiseMode + "  Edge=" + mCaptureEdgeMode + "  Face=" + mCaptureFace);
 
             if (mCaptureYuv1) {
                 b1.addTarget(mYuv1ImageReader.getSurface());
@@ -543,11 +561,11 @@
         Log.v(TAG, "  Sent YUV1 image to ImageWriter.queueInputImage()");
         try {
             CaptureRequest.Builder b1 = mCameraDevice.createReprocessCaptureRequest(mLastTotalCaptureResult);
-            // Portrait.
-            b1.set(CaptureRequest.JPEG_ORIENTATION, 90);
+            // TODO: Read current orientation instead of just assuming device is in native orientation
+            b1.set(CaptureRequest.JPEG_ORIENTATION, mCameraInfoCache.sensorOrientation());
             b1.set(CaptureRequest.JPEG_QUALITY, (byte) 95);
-            b1.set(CaptureRequest.NOISE_REDUCTION_MODE, mCameraInfoCache.noiseModes[mReprocessingNoiseIndex]);
-            b1.set(CaptureRequest.EDGE_MODE, mCameraInfoCache.edgeModes[mReprocessingEdgeIndex]);
+            b1.set(CaptureRequest.NOISE_REDUCTION_MODE, mReprocessingNoiseMode);
+            b1.set(CaptureRequest.EDGE_MODE, mReprocessingEdgeMode);
             b1.addTarget(mJpegImageReader.getSurface());
             mCurrentCaptureSession.capture(b1.build(), mReprocessingCaptureCallback, mOpsHandler);
             mReprocessingRequestNanoTime = System.nanoTime();
@@ -775,6 +793,32 @@
      * UTILITY FUNCTIONS *
      *********************/
 
+    /**
+     * Return the next mode after currentMode in supportedModes, wrapping to
+     * start of mode list if currentMode is last.  Returns currentMode if it is not found in
+     * supportedModes.
+     *
+     * @param currentMode
+     * @param supportedModes
+     * @return next mode after currentMode in supportedModes
+     */
+    private int getNextMode(int currentMode, int[] supportedModes) {
+        boolean getNext = false;
+        for (int m : supportedModes) {
+            if (getNext) {
+                return m;
+            }
+            if (m == currentMode) {
+                getNext = true;
+            }
+        }
+        if (getNext) {
+            return supportedModes[0];
+        }
+        // Can't find mode in list
+        return currentMode;
+    }
+
     private static String edgeModeToString(int mode) {
         switch (mode) {
             case CaptureRequest.EDGE_MODE_OFF:
@@ -783,13 +827,12 @@
                 return "FAST";
             case CaptureRequest.EDGE_MODE_HIGH_QUALITY:
                 return "HiQ";
-            case 3:
+            case CaptureRequest.EDGE_MODE_ZERO_SHUTTER_LAG:
                 return "ZSL";
         }
         return Integer.toString(mode);
     }
 
-
     private static String noiseModeToString(int mode) {
         switch (mode) {
             case CaptureRequest.NOISE_REDUCTION_MODE_OFF:
@@ -798,9 +841,9 @@
                 return "FAST";
             case CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY:
                 return "HiQ";
-            case 3:
+            case CaptureRequest.NOISE_REDUCTION_MODE_MINIMAL:
                 return "MIN";
-            case 4:
+            case CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG:
                 return "ZSL";
         }
         return Integer.toString(mode);
diff --git a/src/com/android/devcamera/CameraInfoCache.java b/src/com/android/devcamera/CameraInfoCache.java
index 03d27a4..699fd97 100644
--- a/src/com/android/devcamera/CameraInfoCache.java
+++ b/src/com/android/devcamera/CameraInfoCache.java
@@ -24,6 +24,7 @@
 import android.os.Build;
 import android.util.Log;
 import android.util.Size;
+import android.util.SizeF;
 
 /**
  * Caches (static) information about the first/main camera.
@@ -33,13 +34,7 @@
 public class CameraInfoCache {
     private static final String TAG = "DevCamera_CAMINFO";
 
-    public static final boolean IS_NEXUS_5 = "hammerhead".equalsIgnoreCase(Build.DEVICE);
     public static final boolean IS_NEXUS_6 = "shamu".equalsIgnoreCase(Build.DEVICE);
-    public static final boolean IS_NEXUS_9 = "flounder".equalsIgnoreCase(Build.DEVICE);
-    public static final boolean IS_ANGLER = "angler".equalsIgnoreCase(Build.DEVICE);
-    public static final boolean IS_BULLHEAD = "bullhead".equalsIgnoreCase(Build.DEVICE);
-    public static final boolean IS_SAMSUNG_S6 = "zerofltevzw".equalsIgnoreCase(Build.DEVICE);
-    public static final boolean IS_LG_G4 = "p1_lgu_kr".equalsIgnoreCase(Build.PRODUCT);
 
     public int[] noiseModes;
     public int[] edgeModes;
@@ -53,7 +48,7 @@
     private Integer mSensorOrientation;
     private Integer mRawFormat;
     private int mBestFaceMode;
-    private boolean mCamera2FullModeAvailable;
+    private int mHardwareLevel;
 
     /**
      * Constructor.
@@ -115,37 +110,101 @@
         noiseModes = mCameraCharacteristics.get(CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES);
 
         // Misc stuff.
-        int hwLevel = mCameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
-
-        mCamera2FullModeAvailable = (hwLevel != CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY)
-                && (hwLevel >= CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
+        mHardwareLevel = mCameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
 
         mSensorOrientation = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
     }
 
+    boolean supportedModesContains(int[] modes, int mode) {
+        for (int m : modes) {
+            if (m == mode) return true;
+        }
+        return false;
+    }
+
     public int sensorOrientation() {
         return mSensorOrientation;
     }
 
     public boolean isCamera2FullModeAvailable() {
-        return mCamera2FullModeAvailable;
+        return isHardwareLevelAtLeast(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
+    }
+
+    public boolean isHardwareLevelAtLeast(int level) {
+        // Special-case LEGACY since it has numerical value 2
+        if (level == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+            // All devices are at least LEGACY
+            return true;
+        }
+        if (mHardwareLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+            // Since level isn't LEGACY
+            return false;
+        }
+        // All other levels can be compared numerically
+        return mHardwareLevel >= level;
+    }
+
+    public boolean isCapabilitySupported(int capability) {
+        int[] caps = mCameraCharacteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
+        for (int c: caps) {
+            if (c == capability) return true;
+        }
+        return false;
     }
 
     public float getDiopterLow() {
-        if (IS_NEXUS_6) {
-            return 0f;
-        }
         return 0f; // Infinity
     }
 
     public float getDiopterHi() {
-        if (IS_NEXUS_6) {
-            return 14.29f;
-        }
-        return 16f;
+        Float minFocusDistance =
+                mCameraCharacteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
+        // LEGACY devices don't report this, but they won't report focus distance anyway, so just
+        // default to zero
+        return (minFocusDistance == null) ? 0.0f : minFocusDistance;
     }
 
     /**
+     * Calculate camera device horizontal and vertical fields of view.
+     *
+     * @return horizontal and vertical field of view, in degrees.
+     */
+    public float[] getFieldOfView() {
+        float[] availableFocalLengths =
+                mCameraCharacteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS);
+        float focalLength = 4.5f; // mm, default from Nexus 6P
+        if (availableFocalLengths == null || availableFocalLengths.length == 0) {
+            Log.e(TAG, "No focal length reported by camera device, assuming default " +
+                    focalLength);
+        } else {
+            focalLength = availableFocalLengths[0];
+        }
+        SizeF physicalSize =
+                mCameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE);
+        if (physicalSize == null) {
+            physicalSize = new SizeF(6.32f, 4.69f); // mm, default from Nexus 6P
+            Log.e(TAG, "No physical sensor dimensions reported by camera device, assuming default "
+                    + physicalSize);
+        }
+
+        // Only active array is actually visible, so calculate fraction of physicalSize that it takes up
+        Size pixelArraySize = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE);
+        Rect activeArraySize = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+
+        float activeWidthFraction = activeArraySize.width() / (float) pixelArraySize.getWidth();
+        float activeHeightFraction = activeArraySize.height() / (float) pixelArraySize.getHeight();
+
+        // Simple rectilinear lens field of view formula:
+        //   angle of view = 2 * arctan ( active size / (2 * focal length) )
+        float[] fieldOfView = new float[2];
+        fieldOfView[0] = (float) Math.toDegrees(
+                2 * Math.atan(physicalSize.getWidth() * activeWidthFraction / 2 / focalLength));
+        fieldOfView[1] = (float) Math.toDegrees(
+                2 * Math.atan(physicalSize.getHeight() * activeHeightFraction / 2 / focalLength));
+
+        return fieldOfView;
+    }
+    /**
      * Private utility function.
      */
     private Size returnLargestSize(Size[] sizes) {
@@ -194,7 +253,8 @@
         if (aspect > 1.6) {
             return new Size(1920, 1080); // TODO: Check available resolutions.
         }
-        if (IS_ANGLER || IS_BULLHEAD) {
+        if (isHardwareLevelAtLeast(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3)) {
+            // Bigger preview size for more advanced devices
             return new Size(1440, 1080);
         }
         return new Size(1280, 960); // TODO: Check available resolutions.
@@ -215,9 +275,9 @@
     public boolean rawAvailable() {
         return mRawSize != null;
     }
-    public boolean reprocessingAvailable() {
-        // TODO: Actually query capabilities list.
-        return (IS_ANGLER || IS_BULLHEAD);
+    public boolean isYuvReprocessingAvailable() {
+        return isCapabilitySupported(
+                CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
     }
 
     public Integer getRawFormat() {
diff --git a/src/com/android/devcamera/CameraInterface.java b/src/com/android/devcamera/CameraInterface.java
index 61c1a63..e6df5ce 100644
--- a/src/com/android/devcamera/CameraInterface.java
+++ b/src/com/android/devcamera/CameraInterface.java
@@ -28,6 +28,18 @@
     Size getPreviewSize();
 
     /**
+     * Get camera field of view, in degrees. Entry 0 is horizontal, entry 1 is vertical FOV.
+     */
+    float[] getFieldOfView();
+
+    /**
+     * Get the camera sensor orientation relative to device native orientation
+     * Typically 90 or 270 for phones, 0 or 180 for tablets, though many tables are also
+     * portrait-native.
+     */
+    int getOrientation();
+
+    /**
      * Open the camera. Call startPreview() to actually see something.
      */
     void openCamera();
diff --git a/src/com/android/devcamera/DevCameraActivity.java b/src/com/android/devcamera/DevCameraActivity.java
index fe49a3a..869e065 100644
--- a/src/com/android/devcamera/DevCameraActivity.java
+++ b/src/com/android/devcamera/DevCameraActivity.java
@@ -19,6 +19,7 @@
 import android.content.Intent;
 import android.content.pm.PackageManager;
 import android.graphics.Color;
+import android.hardware.camera2.CameraCharacteristics;
 import android.hardware.camera2.CaptureResult;
 import android.hardware.SensorManager;
 import android.os.Bundle;
@@ -538,12 +539,12 @@
     GyroOperations mGyroOperations;
 
     private void startGyroDisplay() {
-        // TODO: Get field of view angles from Camera API.
-        // TODO: Consider turning OIS off.
-        float fovLargeDegrees = 62.7533f; // Nexus 6
-        float fovSmallDegrees = 49.157f; // Nexus 6
-        mPreviewOverlay.setFieldOfView(fovLargeDegrees, fovSmallDegrees);
 
+        float[] fovs = mCamera.getFieldOfView();
+        mPreviewOverlay.setFieldOfView(fovs[0], fovs[1]);
+        mPreviewOverlay.setFacingAndOrientation(mToggleFrontCam.isChecked() ?
+                CameraCharacteristics.LENS_FACING_FRONT : CameraCharacteristics.LENS_FACING_BACK,
+                mCamera.getOrientation());
         if (mGyroOperations == null) {
             SensorManager sensorManager = (SensorManager) getSystemService(this.SENSOR_SERVICE);
             mGyroOperations = new GyroOperations(sensorManager);
diff --git a/src/com/android/devcamera/PreviewOverlay.java b/src/com/android/devcamera/PreviewOverlay.java
index dff60e9..5909b2d 100644
--- a/src/com/android/devcamera/PreviewOverlay.java
+++ b/src/com/android/devcamera/PreviewOverlay.java
@@ -21,6 +21,7 @@
 import android.graphics.Paint;
 import android.graphics.PointF;
 import android.graphics.RectF;
+import android.hardware.camera2.CameraCharacteristics;
 import android.util.AttributeSet;
 import android.view.View;
 
@@ -41,8 +42,12 @@
     private int mAfState;
     private float mFovLargeDegrees;
     private float mFovSmallDegrees;
+    private int mFacing = CameraCharacteristics.LENS_FACING_BACK;
+    private int mOrientation = 0;  // degrees
+
     float[] mAngles = new float[2];
 
+
     public PreviewOverlay(Context context, AttributeSet attrs) {
         super(context, attrs);
         Resources res = getResources();
@@ -69,6 +74,15 @@
         invalidate();
     }
 
+    /**
+     * Set the facing and orientation of the current camera, for correct coordinate mapping.
+     * Facing is one of the CameraCharacteristics.LENS_FACING_* constants.
+     */
+    public void setFacingAndOrientation(int facing, int orientation) {
+        mFacing = facing;
+        mOrientation = orientation;
+    }
+
     public void show3AInfo(boolean show) {
         mShow3AInfo = show;
         this.setVisibility(VISIBLE);
@@ -76,7 +90,40 @@
     }
 
     public void setGyroAngles(float[] angles) {
-        mAngles = angles;
+        boolean front = (mFacing == CameraCharacteristics.LENS_FACING_BACK);
+        // Rotate gyro coordinates to match camera orientation
+        // Gyro data is always presented in the device native coordinate system, which
+        // is either portrait or landscape depending on device.
+        // (http://developer.android.com/reference/android/hardware/SensorEvent.html)
+        // DevCamera locks itself to portrait, and the camera sensor long edge is always aligned
+        // with the long edge of the device.
+        // mOrientation is the relative orientation of the camera sensor and the device native
+        // orientation, so it can be used to decide if the gyro data is meant to be interpreted
+        // in landscape or portrait and flip coordinates/sign accordingly.
+        // Additionally, front-facing cameras are mirrored, so an additional sign flip is needed.
+        switch (mOrientation) {
+            case 0:
+                mAngles[1] = -angles[0];
+                mAngles[0] = angles[1];
+                break;
+            case 90:
+                mAngles[0] = angles[0];
+                mAngles[1] = angles[1];
+                break;
+            case 180:
+                mAngles[1] = -angles[0];
+                mAngles[0] = angles[1];
+                break;
+            case 270:
+                mAngles[0] = angles[0];
+                mAngles[1] = angles[1];
+                break;
+        }
+        if (mFacing != CameraCharacteristics.LENS_FACING_BACK) {
+            // Reverse sensor readout for front/external facing cameras
+            mAngles[0] = -mAngles[0];
+            mAngles[1] = -mAngles[1];
+        }
     }
 
     public void setFieldOfView(float fovLargeDegrees, float fovSmallDegrees) {
@@ -201,7 +248,6 @@
             float focalLengthH = 0.5f * previewH / (float) Math.tan(Math.toRadians(mFovLargeDegrees) * 0.5);
             float focalLengthW = 0.5f * previewW / (float) Math.tan(Math.toRadians(mFovSmallDegrees) * 0.5);
             final double ANGLE_STEP = (float) Math.toRadians(10f);
-
             // Draw horizontal lines, with 10 degree spacing.
             double phase1 = mAngles[0] % ANGLE_STEP;
             for (double i = -5 * ANGLE_STEP + phase1; i < 5 * ANGLE_STEP; i += ANGLE_STEP) {