Remove device whitelisting and hardcoded constants am: 9736d0f303
am: 8ca0af841b

* commit '8ca0af841b5efcc367ed11169fdcccc98268c8d2':
  Remove device whitelisting and hardcoded constants

Change-Id: Iec35a92d2a2e5b10450156c17751d0917861d3bc
diff --git a/src/com/android/devcamera/Api2Camera.java b/src/com/android/devcamera/Api2Camera.java
index 1c61cb0..65308a5 100644
--- a/src/com/android/devcamera/Api2Camera.java
+++ b/src/com/android/devcamera/Api2Camera.java
@@ -20,6 +20,7 @@
 import android.graphics.SurfaceTexture;
 import android.hardware.camera2.CameraAccessException;
 import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
 import android.hardware.camera2.CameraDevice;
 import android.hardware.camera2.CameraManager;
 import android.hardware.camera2.CameraMetadata;
@@ -155,13 +156,24 @@
         });
 
         // Set initial Noise and Edge modes.
-        if (mCameraInfoCache.IS_BULLHEAD || mCameraInfoCache.IS_ANGLER) {
+        if (mCameraInfoCache.isHardwareLevelAtLeast(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3)) {
             // YUV streams.
-            mCaptureNoiseIndex = 4 /*ZSL*/ % mCameraInfoCache.noiseModes.length;
-            mCaptureEdgeIndex = 3 /*ZSL*/ % mCameraInfoCache.edgeModes.length;
+            if (mCameraInfoCache.supportedModesContains(mCameraInfoCache.noiseModes,
+                    CameraCharacteristics.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG)) {
+                mCaptureNoiseMode = CameraCharacteristics.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
+            } else {
+                mCaptureNoiseMode = CameraCharacteristics.NOISE_REDUCTION_MODE_FAST;
+            }
+            if (mCameraInfoCache.supportedModesContains(mCameraInfoCache.edgeModes,
+                    CameraCharacteristics.EDGE_MODE_ZERO_SHUTTER_LAG)) {
+                mCaptureEdgeMode = CameraCharacteristics.EDGE_MODE_ZERO_SHUTTER_LAG;
+            } else {
+                mCaptureEdgeMode = CameraCharacteristics.EDGE_MODE_FAST;
+            }
+
             // Reprocessing.
-            mReprocessingNoiseIndex = 2 /*High Quality*/ % mCameraInfoCache.noiseModes.length;
-            mReprocessingEdgeIndex = 2 /*HIgh Quality*/ % mCameraInfoCache.edgeModes.length;
+            mReprocessingNoiseMode = CameraCharacteristics.NOISE_REDUCTION_MODE_HIGH_QUALITY;
+            mReprocessingEdgeMode = CameraCharacteristics.EDGE_MODE_HIGH_QUALITY;
         }
     }
 
@@ -279,7 +291,7 @@
     }
 
     public boolean isReprocessingAvailable() {
-        return mCameraInfoCache.reprocessingAvailable();
+        return mCameraInfoCache.isYuvReprocessingAvailable();
     }
 
     @Override
@@ -288,13 +300,15 @@
     }
 
     @Override
+    public float[] getFieldOfView() {
+        return mCameraInfoCache.getFieldOfView();
+    }
+
+    @Override
     public void openCamera() {
-        // If API2 FULL mode is not available, display toast, do nothing.
+        // If API2 FULL mode is not available, display a toast and continue anyway.
         if (!mCameraInfoCache.isCamera2FullModeAvailable()) {
             mMyCameraCallback.noCamera2Full();
-            if (!mCameraInfoCache.IS_NEXUS_6) {
-                return;
-            }
         }
 
         Log.v(TAG, "Opening camera " + mCameraInfoCache.getCameraId());
@@ -390,14 +404,14 @@
                     " x " + mCameraInfoCache.getRawStreamSize().getHeight());
         }
 
-        if (USE_REPROCESSING_IF_AVAIL && mCameraInfoCache.reprocessingAvailable()) {
+        if (USE_REPROCESSING_IF_AVAIL && mCameraInfoCache.isYuvReprocessingAvailable()) {
             outputSurfaces.add(mJpegImageReader.getSurface());
             Log.v(TAG, "  .. added JPEG ImageReader " + mCameraInfoCache.getJpegStreamSize().getWidth() +
                     " x " + mCameraInfoCache.getJpegStreamSize().getHeight());
         }
 
         try {
-            if (USE_REPROCESSING_IF_AVAIL && mCameraInfoCache.reprocessingAvailable()) {
+            if (USE_REPROCESSING_IF_AVAIL && mCameraInfoCache.isYuvReprocessingAvailable()) {
                 InputConfiguration inputConfig = new InputConfiguration(mCameraInfoCache.getYuvStream1Size().getWidth(),
                         mCameraInfoCache.getYuvStream1Size().getHeight(), ImageFormat.YUV_420_888);
                 mCameraDevice.createReprocessableCaptureSession(inputConfig, outputSurfaces,
@@ -441,28 +455,27 @@
     private boolean mCaptureYuv1 = false;
     private boolean mCaptureYuv2 = false;
     private boolean mCaptureRaw = false;
-    private int mCaptureNoiseIndex = CaptureRequest.NOISE_REDUCTION_MODE_OFF;
-    private int mCaptureEdgeIndex = CaptureRequest.EDGE_MODE_OFF;
+    private int mCaptureNoiseMode = CaptureRequest.NOISE_REDUCTION_MODE_FAST;
+    private int mCaptureEdgeMode = CaptureRequest.EDGE_MODE_FAST;
     private boolean mCaptureFace = false;
     // Variables to hold reprocessing state.
-    private int mReprocessingNoiseIndex = CaptureRequest.NOISE_REDUCTION_MODE_OFF;
-    private int mReprocessingEdgeIndex = CaptureRequest.EDGE_MODE_OFF;
-
+    private int mReprocessingNoiseMode = CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY;
+    private int mReprocessingEdgeMode = CaptureRequest.EDGE_MODE_HIGH_QUALITY;
 
     public void setCaptureFlow(Boolean yuv1, Boolean yuv2, Boolean raw10, Boolean nr, Boolean edge, Boolean face) {
         if (yuv1 != null) mCaptureYuv1 = yuv1;
         if (yuv2 != null) mCaptureYuv2 = yuv2;
         if (raw10 != null) mCaptureRaw = raw10 && RAW_STREAM_ENABLE;
         if (nr) {
-            mCaptureNoiseIndex = ++mCaptureNoiseIndex % mCameraInfoCache.noiseModes.length;
+            mCaptureNoiseMode = getNextMode(mCaptureNoiseMode, mCameraInfoCache.noiseModes);
         }
         if (edge) {
-            mCaptureEdgeIndex = ++mCaptureEdgeIndex % mCameraInfoCache.edgeModes.length;
+            mCaptureEdgeMode = getNextMode(mCaptureEdgeMode, mCameraInfoCache.edgeModes);
         }
         if (face != null) mCaptureFace = face;
         mMyCameraCallback.setNoiseEdgeText(
-                "NR " + noiseModeToString(mCameraInfoCache.noiseModes[mCaptureNoiseIndex]),
-                "Edge " + edgeModeToString(mCameraInfoCache.edgeModes[mCaptureEdgeIndex])
+                "NR " + noiseModeToString(mCaptureNoiseMode),
+                "Edge " + edgeModeToString(mCaptureEdgeMode)
         );
 
         if (mCurrentCaptureSession != null) {
@@ -472,14 +485,14 @@
 
     public void setReprocessingFlow(Boolean nr, Boolean edge) {
         if (nr) {
-            mReprocessingNoiseIndex = ++mReprocessingNoiseIndex % mCameraInfoCache.noiseModes.length;
+            mReprocessingNoiseMode = getNextMode(mReprocessingNoiseMode, mCameraInfoCache.noiseModes);
         }
         if (edge) {
-            mReprocessingEdgeIndex = ++mReprocessingEdgeIndex % mCameraInfoCache.edgeModes.length;
+            mReprocessingEdgeMode = getNextMode(mReprocessingEdgeMode, mCameraInfoCache.edgeModes);
         }
         mMyCameraCallback.setNoiseEdgeTextForReprocessing(
-                "NR " + noiseModeToString(mCameraInfoCache.noiseModes[mReprocessingNoiseIndex]),
-                "Edge " + edgeModeToString(mCameraInfoCache.edgeModes[mReprocessingEdgeIndex])
+                "NR " + noiseModeToString(mReprocessingNoiseMode),
+                "Edge " + edgeModeToString(mReprocessingEdgeMode)
         );
     }
 
@@ -496,11 +509,11 @@
                 b1.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
             }
 
-            b1.set(CaptureRequest.NOISE_REDUCTION_MODE, mCameraInfoCache.noiseModes[mCaptureNoiseIndex]);
-            b1.set(CaptureRequest.EDGE_MODE, mCameraInfoCache.edgeModes[mCaptureEdgeIndex]);
+            b1.set(CaptureRequest.NOISE_REDUCTION_MODE, mCaptureNoiseMode);
+            b1.set(CaptureRequest.EDGE_MODE, mCaptureEdgeMode);
             b1.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, mCaptureFace ? mCameraInfoCache.bestFaceDetectionMode() : CaptureRequest.STATISTICS_FACE_DETECT_MODE_OFF);
 
-            Log.v(TAG, "  .. NR=" + mCaptureNoiseIndex + "  Edge=" + mCaptureEdgeIndex + "  Face=" + mCaptureFace);
+            Log.v(TAG, "  .. NR=" + mCaptureNoiseMode + "  Edge=" + mCaptureEdgeMode + "  Face=" + mCaptureFace);
 
             if (mCaptureYuv1) {
                 b1.addTarget(mYuv1ImageReader.getSurface());
@@ -546,8 +559,8 @@
             // Portrait.
             b1.set(CaptureRequest.JPEG_ORIENTATION, 90);
             b1.set(CaptureRequest.JPEG_QUALITY, (byte) 95);
-            b1.set(CaptureRequest.NOISE_REDUCTION_MODE, mCameraInfoCache.noiseModes[mReprocessingNoiseIndex]);
-            b1.set(CaptureRequest.EDGE_MODE, mCameraInfoCache.edgeModes[mReprocessingEdgeIndex]);
+            b1.set(CaptureRequest.NOISE_REDUCTION_MODE, mReprocessingNoiseMode);
+            b1.set(CaptureRequest.EDGE_MODE, mReprocessingEdgeMode);
             b1.addTarget(mJpegImageReader.getSurface());
             mCurrentCaptureSession.capture(b1.build(), mReprocessingCaptureCallback, mOpsHandler);
             mReprocessingRequestNanoTime = System.nanoTime();
@@ -775,6 +788,32 @@
      * UTILITY FUNCTIONS *
      *********************/
 
+    /**
+     * Return the next mode after currentMode in supportedModes, wrapping to the
+     * start of the list if currentMode is the last entry. Returns currentMode
+     * unchanged if it is not found in supportedModes.
+     *
+     * @param currentMode the mode currently in use
+     * @param supportedModes the modes reported as supported by the camera device
+     * @return the next mode after currentMode in supportedModes
+     */
+    private int getNextMode(int currentMode, int[] supportedModes) {
+        boolean getNext = false;
+        for (int m : supportedModes) {
+            if (getNext) {
+                return m;
+            }
+            if (m == currentMode) {
+                getNext = true;
+            }
+        }
+        if (getNext) {
+            return supportedModes[0];
+        }
+        // currentMode was not found in supportedModes; return it unchanged.
+        return currentMode;
+    }
+
     private static String edgeModeToString(int mode) {
         switch (mode) {
             case CaptureRequest.EDGE_MODE_OFF:
@@ -783,13 +822,12 @@
                 return "FAST";
             case CaptureRequest.EDGE_MODE_HIGH_QUALITY:
                 return "HiQ";
-            case 3:
+            case CaptureRequest.EDGE_MODE_ZERO_SHUTTER_LAG:
                 return "ZSL";
         }
         return Integer.toString(mode);
     }
 
-
     private static String noiseModeToString(int mode) {
         switch (mode) {
             case CaptureRequest.NOISE_REDUCTION_MODE_OFF:
@@ -798,9 +836,9 @@
                 return "FAST";
             case CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY:
                 return "HiQ";
-            case 3:
+            case CaptureRequest.NOISE_REDUCTION_MODE_MINIMAL:
                 return "MIN";
-            case 4:
+            case CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG:
                 return "ZSL";
         }
         return Integer.toString(mode);
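
For illustration, a standalone sketch of the cycling behavior of the getNextMode() helper added above: step to the entry after the current mode, wrap to the first entry when the current mode is last, and return the current mode unchanged when it is not in the list. The three-entry mode list here is hypothetical.

import android.hardware.camera2.CaptureRequest;

final class NextModeSketch {
    // Mirrors Api2Camera.getNextMode(): the entry after currentMode, wrapping to
    // the first entry; currentMode itself if it is absent from supportedModes.
    static int getNextMode(int currentMode, int[] supportedModes) {
        boolean getNext = false;
        for (int m : supportedModes) {
            if (getNext) return m;
            if (m == currentMode) getNext = true;
        }
        return getNext ? supportedModes[0] : currentMode;
    }

    public static void main(String[] args) {
        // Hypothetical device that reports three noise reduction modes.
        int[] noiseModes = {
                CaptureRequest.NOISE_REDUCTION_MODE_OFF,
                CaptureRequest.NOISE_REDUCTION_MODE_FAST,
                CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY
        };
        int mode = CaptureRequest.NOISE_REDUCTION_MODE_FAST;
        mode = getNextMode(mode, noiseModes); // -> HIGH_QUALITY
        mode = getNextMode(mode, noiseModes); // -> OFF (wraps to the first entry)
        mode = getNextMode(CaptureRequest.NOISE_REDUCTION_MODE_MINIMAL,
                noiseModes);                  // -> MINIMAL (unsupported, returned unchanged)
        System.out.println("final mode = " + mode);
    }
}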
diff --git a/src/com/android/devcamera/CameraInfoCache.java b/src/com/android/devcamera/CameraInfoCache.java
index 03d27a4..1a5b0b1 100644
--- a/src/com/android/devcamera/CameraInfoCache.java
+++ b/src/com/android/devcamera/CameraInfoCache.java
@@ -24,6 +24,7 @@
 import android.os.Build;
 import android.util.Log;
 import android.util.Size;
+import android.util.SizeF;
 
 /**
  * Caches (static) information about the first/main camera.
@@ -33,13 +34,7 @@
 public class CameraInfoCache {
     private static final String TAG = "DevCamera_CAMINFO";
 
-    public static final boolean IS_NEXUS_5 = "hammerhead".equalsIgnoreCase(Build.DEVICE);
     public static final boolean IS_NEXUS_6 = "shamu".equalsIgnoreCase(Build.DEVICE);
-    public static final boolean IS_NEXUS_9 = "flounder".equalsIgnoreCase(Build.DEVICE);
-    public static final boolean IS_ANGLER = "angler".equalsIgnoreCase(Build.DEVICE);
-    public static final boolean IS_BULLHEAD = "bullhead".equalsIgnoreCase(Build.DEVICE);
-    public static final boolean IS_SAMSUNG_S6 = "zerofltevzw".equalsIgnoreCase(Build.DEVICE);
-    public static final boolean IS_LG_G4 = "p1_lgu_kr".equalsIgnoreCase(Build.PRODUCT);
 
     public int[] noiseModes;
     public int[] edgeModes;
@@ -53,7 +48,7 @@
     private Integer mSensorOrientation;
     private Integer mRawFormat;
     private int mBestFaceMode;
-    private boolean mCamera2FullModeAvailable;
+    private int mHardwareLevel;
 
     /**
      * Constructor.
@@ -115,37 +110,91 @@
         noiseModes = mCameraCharacteristics.get(CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES);
 
         // Misc stuff.
-        int hwLevel = mCameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
-
-        mCamera2FullModeAvailable = (hwLevel != CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY)
-                && (hwLevel >= CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
+        mHardwareLevel = mCameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
 
         mSensorOrientation = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
     }
 
+    boolean supportedModesContains(int[] modes, int mode) {
+        for (int m : modes) {
+            if (m == mode) return true;
+        }
+        return false;
+    }
+
     public int sensorOrientation() {
         return mSensorOrientation;
     }
 
     public boolean isCamera2FullModeAvailable() {
-        return mCamera2FullModeAvailable;
+        return isHardwareLevelAtLeast(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
+    }
+
+    public boolean isHardwareLevelAtLeast(int level) {
+        // Special-case LEGACY, since its numerical value (2) does not reflect that it is the lowest level
+        if (level == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+            // All devices are at least LEGACY
+            return true;
+        }
+        if (mHardwareLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+            // The requested level isn't LEGACY (handled above), so a LEGACY device can't meet it
+            return false;
+        }
+        // All other levels can be compared numerically
+        return mHardwareLevel >= level;
+    }
+
+    public boolean isCapabilitySupported(int capability) {
+        int[] caps = mCameraCharacteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
+        for (int c: caps) {
+            if (c == capability) return true;
+        }
+        return false;
     }
 
     public float getDiopterLow() {
-        if (IS_NEXUS_6) {
-            return 0f;
-        }
         return 0f; // Infinity
     }
 
     public float getDiopterHi() {
-        if (IS_NEXUS_6) {
-            return 14.29f;
-        }
-        return 16f;
+        Float minFocusDistance =
+                mCameraCharacteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
+        // LEGACY devices don't report this, but they won't report focus distance anyway, so just
+        // default to zero
+        return (minFocusDistance == null) ? 0.0f : minFocusDistance;
     }
 
     /**
+     * Calculate camera device horizontal and vertical fields of view.
+     *
+     * @return horizontal and vertical field of view, in degrees.
+     */
+    public float[] getFieldOfView() {
+        float[] availableFocalLengths =
+                mCameraCharacteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS);
+        float focalLength = 4.5f; // mm, default from Nexus 6P
+        if (availableFocalLengths == null || availableFocalLengths.length == 0) {
+            Log.e(TAG, "No focal length reported by camera device, assuming default " +
+                    focalLength);
+        } else {
+            focalLength = availableFocalLengths[0];
+        }
+        SizeF physicalSize =
+                mCameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE);
+        if (physicalSize == null) {
+            physicalSize = new SizeF(6.32f, 4.69f); // mm, default from Nexus 6P
+            Log.e(TAG, "No physical sensor dimensions reported by camera device, assuming default "
+                    + physicalSize);
+        }
+        // Simple rectilinear lens field of view formula:
+        //   angle of view = 2 * arctan ( sensor size / (2 * focal length) )
+        float[] fieldOfView = new float[2];
+        fieldOfView[0] = (float) Math.toDegrees(2 * Math.atan(physicalSize.getWidth() / 2 / focalLength));
+        fieldOfView[1] = (float) Math.toDegrees(2 * Math.atan(physicalSize.getHeight() / 2 / focalLength));
+
+        return fieldOfView;
+    }
+    /**
      * Private utility function.
      */
     private Size returnLargestSize(Size[] sizes) {
@@ -194,7 +243,8 @@
         if (aspect > 1.6) {
             return new Size(1920, 1080); // TODO: Check available resolutions.
         }
-        if (IS_ANGLER || IS_BULLHEAD) {
+        if (isHardwareLevelAtLeast(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3)) {
+            // Bigger preview size for more advanced devices
             return new Size(1440, 1080);
         }
         return new Size(1280, 960); // TODO: Check available resolutions.
@@ -215,9 +265,9 @@
     public boolean rawAvailable() {
         return mRawSize != null;
     }
-    public boolean reprocessingAvailable() {
-        // TODO: Actually query capabilities list.
-        return (IS_ANGLER || IS_BULLHEAD);
+    public boolean isYuvReprocessingAvailable() {
+        return isCapabilitySupported(
+                CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
     }
 
     public Integer getRawFormat() {
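
For illustration, the rectilinear field-of-view formula used by getFieldOfView(), evaluated with the same fallback values the method assumes when the device reports nothing (4.5 mm focal length and a 6.32 mm x 4.69 mm sensor, the Nexus 6P defaults above); the class and method names are only for this sketch.

import android.util.SizeF;

final class FovSketch {
    // angle of view = 2 * arctan(sensor size / (2 * focal length)), per axis.
    static float[] fieldOfViewDegrees(float focalLengthMm, SizeF sensorMm) {
        return new float[] {
                (float) Math.toDegrees(2 * Math.atan(sensorMm.getWidth() / (2 * focalLengthMm))),
                (float) Math.toDegrees(2 * Math.atan(sensorMm.getHeight() / (2 * focalLengthMm)))
        };
    }

    public static void main(String[] args) {
        float[] fov = fieldOfViewDegrees(4.5f, new SizeF(6.32f, 4.69f));
        // Roughly 70.2 degrees horizontal by 55.1 degrees vertical.
        System.out.println(fov[0] + " x " + fov[1]);
    }
}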
diff --git a/src/com/android/devcamera/CameraInterface.java b/src/com/android/devcamera/CameraInterface.java
index 61c1a63..e16c9e5 100644
--- a/src/com/android/devcamera/CameraInterface.java
+++ b/src/com/android/devcamera/CameraInterface.java
@@ -28,6 +28,11 @@
     Size getPreviewSize();
 
     /**
+     * Get camera field of view, in degrees. Entry 0 is horizontal, entry 1 is vertical FOV.
+     */
+    float[] getFieldOfView();
+
+    /**
      * Open the camera. Call startPreview() to actually see something.
      */
     void openCamera();
diff --git a/src/com/android/devcamera/DevCameraActivity.java b/src/com/android/devcamera/DevCameraActivity.java
index fe49a3a..6194915 100644
--- a/src/com/android/devcamera/DevCameraActivity.java
+++ b/src/com/android/devcamera/DevCameraActivity.java
@@ -19,6 +19,7 @@
 import android.content.Intent;
 import android.content.pm.PackageManager;
 import android.graphics.Color;
+import android.hardware.camera2.CameraCharacteristics;
 import android.hardware.camera2.CaptureResult;
 import android.hardware.SensorManager;
 import android.os.Bundle;
@@ -538,12 +539,11 @@
     GyroOperations mGyroOperations;
 
     private void startGyroDisplay() {
-        // TODO: Get field of view angles from Camera API.
-        // TODO: Consider turning OIS off.
-        float fovLargeDegrees = 62.7533f; // Nexus 6
-        float fovSmallDegrees = 49.157f; // Nexus 6
-        mPreviewOverlay.setFieldOfView(fovLargeDegrees, fovSmallDegrees);
 
+        float[] fovs = mCamera.getFieldOfView();
+        mPreviewOverlay.setFieldOfView(fovs[0], fovs[1]);
+        mPreviewOverlay.setFacing(mToggleFrontCam.isChecked() ?
+                CameraCharacteristics.LENS_FACING_FRONT : CameraCharacteristics.LENS_FACING_BACK);
         if (mGyroOperations == null) {
             SensorManager sensorManager = (SensorManager) getSystemService(this.SENSOR_SERVICE);
             mGyroOperations = new GyroOperations(sensorManager);
diff --git a/src/com/android/devcamera/PreviewOverlay.java b/src/com/android/devcamera/PreviewOverlay.java
index dff60e9..7a4dd02 100644
--- a/src/com/android/devcamera/PreviewOverlay.java
+++ b/src/com/android/devcamera/PreviewOverlay.java
@@ -21,6 +21,7 @@
 import android.graphics.Paint;
 import android.graphics.PointF;
 import android.graphics.RectF;
+import android.hardware.camera2.CameraCharacteristics;
 import android.util.AttributeSet;
 import android.view.View;
 
@@ -41,6 +42,7 @@
     private int mAfState;
     private float mFovLargeDegrees;
     private float mFovSmallDegrees;
+    private int mFacing = CameraCharacteristics.LENS_FACING_BACK;
     float[] mAngles = new float[2];
 
     public PreviewOverlay(Context context, AttributeSet attrs) {
@@ -69,6 +71,14 @@
         invalidate();
     }
 
+    /**
+     * Set the facing of the current camera, for correct coordinate mapping.
+     * @param facing one of the CameraCharacteristics.LENS_FACING_* constants.
+     */
+    public void setFacing(int facing) {
+        mFacing = facing;
+    }
+
     public void show3AInfo(boolean show) {
         mShow3AInfo = show;
         this.setVisibility(VISIBLE);
@@ -76,7 +86,14 @@
     }
 
     public void setGyroAngles(float[] angles) {
-        mAngles = angles;
+        // Reverse the sensor readout for front/external-facing cameras.
+        if (mFacing == CameraCharacteristics.LENS_FACING_BACK) {
+            mAngles[0] = angles[0];
+            mAngles[1] = angles[1];
+        } else {
+            mAngles[0] = -angles[0];
+            mAngles[1] = -angles[1];
+        }
     }
 
     public void setFieldOfView(float fovLargeDegrees, float fovSmallDegrees) {
@@ -201,7 +218,6 @@
             float focalLengthH = 0.5f * previewH / (float) Math.tan(Math.toRadians(mFovLargeDegrees) * 0.5);
             float focalLengthW = 0.5f * previewW / (float) Math.tan(Math.toRadians(mFovSmallDegrees) * 0.5);
             final double ANGLE_STEP = (float) Math.toRadians(10f);
-
             // Draw horizontal lines, with 10 degree spacing.
             double phase1 = mAngles[0] % ANGLE_STEP;
             for (double i = -5 * ANGLE_STEP + phase1; i < 5 * ANGLE_STEP; i += ANGLE_STEP) {
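
For illustration, the sign convention setGyroAngles() applies: back-facing cameras pass the gyro angles through, while front- and external-facing cameras negate them so the grid overlay tracks the mirrored preview. The readout values here are hypothetical.

import android.hardware.camera2.CameraCharacteristics;

final class GyroSignSketch {
    // Mirrors PreviewOverlay.setGyroAngles(): negate both axes unless the camera
    // is back-facing.
    static float[] mapAngles(float[] angles, int facing) {
        float sign = (facing == CameraCharacteristics.LENS_FACING_BACK) ? 1f : -1f;
        return new float[] { sign * angles[0], sign * angles[1] };
    }

    public static void main(String[] args) {
        float[] gyro = { 0.10f, -0.05f };  // radians, hypothetical readout
        float[] back = mapAngles(gyro, CameraCharacteristics.LENS_FACING_BACK);   // {  0.10, -0.05 }
        float[] front = mapAngles(gyro, CameraCharacteristics.LENS_FACING_FRONT); // { -0.10,  0.05 }
        System.out.println(back[0] + " vs " + front[0]);
    }
}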