DO NOT MERGE: Merge Oreo MR1 into master

Exempt-From-Owner-Approval: Changes already landed internally
Change-Id: I539da7898a79de815d734f2f74e71e2f263c7496
diff --git a/camera/Android.mk b/camera/Android.mk
index 0593872..619c2a6 100644
--- a/camera/Android.mk
+++ b/camera/Android.mk
@@ -11,6 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-ifneq ($(filter muskie walleye taimen,$(TARGET_DEVICE)),)
+ifeq ($(BOARD_USES_EASEL),true)
     include $(call all-subdir-makefiles)
-endif
\ No newline at end of file
+endif
diff --git a/camera/include/HdrPlusTypes.h b/camera/include/HdrPlusTypes.h
index 034ef6c..9f66ffb 100644
--- a/camera/include/HdrPlusTypes.h
+++ b/camera/include/HdrPlusTypes.h
@@ -149,6 +149,10 @@
     // Easel, this offset should be subtracted from AP timestamp.
     int64_t timestampOffsetNs;
 
+    // Sensor timestamp offset due to sensor cropping. When comparing timestamps between AP and
+    // Easel, this offset should be subtracted from AP timestamp.
+    int64_t timestampCropOffsetNs;
+
     // Sensor output format as defined in android_pixel_format.
     int format;
 
@@ -224,6 +228,11 @@
             std::array<T, SIZE> values);
 } // namespace metadatautils
 
+static const uint32_t DEBUG_PARAM_NONE                      = 0u;
+static const uint32_t DEBUG_PARAM_SAVE_GCAME_INPUT_METERING = (1u);
+static const uint32_t DEBUG_PARAM_SAVE_GCAME_INPUT_PAYLOAD  = (1u << 1);
+static const uint32_t DEBUG_PARAM_SAVE_GCAME_TEXT           = (1u << 2);
+
 /*
  * StaticMetadata defines a camera device's characteristics.
  *
@@ -252,30 +261,10 @@
     std::vector<float> availableFocalLengths; // android.lens.info.availableFocalLengths
     std::array<int32_t, 2> shadingMapSize; // android.lens.info.shadingMapSize
     uint8_t focusDistanceCalibration; // android.lens.info.focusDistanceCalibration
+    std::array<int32_t, 2> aeCompensationRange; // android.control.aeCompensationRange
+    float aeCompensationStep; // android.control.aeCompensationStep
 
-    // Check if the contents of lhs and rhs are equal. For vector and array variables, two are
-    // equal if their elements are equal at the same position.
-    bool operator==(const StaticMetadata& rhs) const {
-        return flashInfoAvailable == rhs.flashInfoAvailable &&
-               sensitivityRange == rhs.sensitivityRange &&
-               maxAnalogSensitivity == rhs.maxAnalogSensitivity &&
-               pixelArraySize == rhs.pixelArraySize &&
-               activeArraySize == rhs.activeArraySize &&
-               opticalBlackRegions == rhs.opticalBlackRegions &&
-               availableStreamConfigurations == rhs.availableStreamConfigurations &&
-               referenceIlluminant1 == rhs.referenceIlluminant1 &&
-               referenceIlluminant2 == rhs.referenceIlluminant2 &&
-               calibrationTransform1 == rhs.calibrationTransform1 &&
-               calibrationTransform2 == rhs.calibrationTransform2 &&
-               colorTransform1 == rhs.colorTransform1 &&
-               colorTransform2 == rhs.colorTransform2 &&
-               whiteLevel == rhs.whiteLevel &&
-               colorFilterArrangement == rhs.colorFilterArrangement &&
-               availableApertures == rhs.availableApertures &&
-               availableFocalLengths == rhs.availableFocalLengths &&
-               shadingMapSize == rhs.shadingMapSize &&
-               focusDistanceCalibration == rhs.focusDistanceCalibration;
-    }
+    uint32_t debugParams; // Bitmask of the DEBUG_PARAM_* flags declared above.
 
     // Convert this static metadata to a string and append it to the specified string.
     void appendToString(std::string *strOut) const {
@@ -308,6 +297,11 @@
         metadatautils::appendVectorOrArrayToString(strOut, "shadingMapSize", shadingMapSize);
         metadatautils::appendValueToString(strOut, "focusDistanceCalibration",
                 focusDistanceCalibration);
+        metadatautils::appendVectorOrArrayToString(strOut, "aeCompensationRange",
+                aeCompensationRange);
+        metadatautils::appendValueToString(strOut, "aeCompensationStep",
+                aeCompensationStep);
+        metadatautils::appendValueToString(strOut, "debugParams", debugParams);
     }
 };
 
@@ -340,31 +334,12 @@
     std::array<float, 4> dynamicBlackLevel; // android.sensor.dynamicBlackLevel
     std::vector<float> lensShadingMap; // android.statistics.lensShadingMap
     float focusDistance; // android.lens.focusDistance
-
-    // Check if the contents of lhs and rhs are equal. For vector and array variables, two are
-    // equal if their elements are equal at the same position.
-    bool operator==(const FrameMetadata& rhs) const {
-        return easelTimestamp == rhs.easelTimestamp &&
-               exposureTime == rhs.exposureTime &&
-               sensitivity == rhs.sensitivity &&
-               postRawSensitivityBoost == rhs.postRawSensitivityBoost &&
-               flashMode == rhs.flashMode &&
-               colorCorrectionGains == rhs.colorCorrectionGains &&
-               colorCorrectionTransform == rhs.colorCorrectionTransform &&
-               neutralColorPoint == rhs.neutralColorPoint &&
-               timestamp == rhs.timestamp &&
-               blackLevelLock == rhs.blackLevelLock &&
-               faceDetectMode == rhs.faceDetectMode &&
-               faceIds == rhs.faceIds &&
-               faceLandmarks == rhs.faceLandmarks &&
-               faceRectangles == rhs.faceRectangles &&
-               faceScores == rhs.faceScores &&
-               sceneFlicker == rhs.sceneFlicker &&
-               noiseProfile == rhs.noiseProfile &&
-               dynamicBlackLevel == rhs.dynamicBlackLevel &&
-               lensShadingMap == rhs.lensShadingMap &&
-               focusDistance == rhs.focusDistance;
-    }
+    int32_t aeExposureCompensation; // android.control.aeExposureCompensation
+    uint8_t aeMode; // android.control.aeMode
+    uint8_t aeLock; // android.control.aeLock
+    uint8_t aeState; // android.control.aeState
+    uint8_t aePrecaptureTrigger; // android.control.aePrecaptureTrigger
+    std::vector<std::array<int32_t, 5>> aeRegions; // android.control.aeRegions
 
     // Convert this static metadata to a string and append it to the specified string.
     void appendToString(std::string *strOut) const {
@@ -393,6 +368,35 @@
         metadatautils::appendVectorOrArrayToString(strOut, "dynamicBlackLevel", dynamicBlackLevel);
         metadatautils::appendVectorOrArrayToString(strOut, "lensShadingMap", lensShadingMap);
         metadatautils::appendValueToString(strOut, "focusDistance", focusDistance);
+        metadatautils::appendValueToString(strOut, "aeExposureCompensation", aeExposureCompensation);
+        metadatautils::appendValueToString(strOut, "aeMode", aeMode);
+        metadatautils::appendValueToString(strOut, "aeLock", aeLock);
+        metadatautils::appendValueToString(strOut, "aeState", aeState);
+        metadatautils::appendValueToString(strOut, "aePrecaptureTrigger", aePrecaptureTrigger);
+        metadatautils::appendVectorArrayToString(strOut, "aeRegions", aeRegions);
+    }
+};
+
+/*
+ * RequestMetadata defines the properties for a capture request.
+ *
+ * If this structure is changed, serialization in MessengerToHdrPlusClient and deserialization in
+ * MessengerListenerFromHdrPlusService should also be updated.
+ */
+struct RequestMetadata {
+    std::array<int32_t, 4> cropRegion; // android.scaler.cropRegion (x_min, y_min, width, height)
+    int32_t aeExposureCompensation; // android.control.aeExposureCompensation
+
+    bool postviewEnable; // com.google.nexus.experimental2017.stats.postview_enable
+    bool continuousCapturing; // Whether to keep capturing RAW while HDR+ processing is in progress.
+
+    // Convert this request metadata to a string and append it to the specified string.
+    void appendToString(std::string *strOut) const {
+        if (strOut == nullptr) return;
+        metadatautils::appendVectorOrArrayToString(strOut, "cropRegion", cropRegion);
+        metadatautils::appendValueToString(strOut, "aeExposureCompensation", aeExposureCompensation);
+        metadatautils::appendValueToString(strOut, "postviewEnable", postviewEnable);
+        metadatautils::appendValueToString(strOut, "continuousCapturing", continuousCapturing);
     }
 };
 
@@ -406,19 +410,14 @@
     int64_t easelTimestamp; // Easel timestamp of SOF of the base frame.
     int64_t timestamp; // android.sensor.timestamp. AP timestamp of exposure start of the base
                        // frame.
-
-    // Check if the contents of lhs and rhs are equal. For vector and array variables, two are
-    // equal if their elements are equal at the same position.
-    bool operator==(const ResultMetadata& rhs) const {
-        return easelTimestamp == rhs.easelTimestamp &&
-               timestamp == rhs.timestamp;
-    }
+    std::string makernote; // Obfuscated capture information.
 
     // Convert this static metadata to a string and append it to the specified string.
     void appendToString(std::string *strOut) const {
         if (strOut == nullptr) return;
         metadatautils::appendValueToString(strOut, "easelTimestamp", easelTimestamp);
         metadatautils::appendValueToString(strOut, "timestamp", timestamp);
+        metadatautils::appendValueToString(strOut, "makernote", makernote.size());
     }
 };
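
The field comments above describe both sensor timestamp offsets as amounts to subtract from the AP timestamp before comparing against Easel timestamps, and debugParams as a bitmask built from the DEBUG_PARAM_* flags. A minimal sketch of how a caller of this header might apply them; the helper names are illustrative and not part of the API:

    #include <cstdint>
    #include "HdrPlusTypes.h"

    // Hypothetical helper: map an AP sensor timestamp into the Easel time base by
    // removing both offsets, as the field comments above instruct. The offsets are
    // assumed to have been read out of the sensor mode structure in this header.
    static int64_t apTimestampInEaselTimeNs(int64_t apTimestampNs, int64_t timestampOffsetNs,
            int64_t timestampCropOffsetNs) {
        return apTimestampNs - timestampOffsetNs - timestampCropOffsetNs;
    }

    // Hypothetical helper: test one bit of StaticMetadata::debugParams. The pbcamera
    // namespace qualification is an assumption based on the rest of this header.
    static bool shouldSaveGcameInputPayload(uint32_t debugParams) {
        return (debugParams & pbcamera::DEBUG_PARAM_SAVE_GCAME_INPUT_PAYLOAD) != 0;
    }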
 
diff --git a/camera/libhdrplusclient/Android.mk b/camera/libhdrplusclient/Android.mk
index 562e02c..a24eeb2 100644
--- a/camera/libhdrplusclient/Android.mk
+++ b/camera/libhdrplusclient/Android.mk
@@ -33,6 +33,12 @@
     LOCAL_SHARED_LIBRARIES += libhdrplusclientimpl
 endif
 
+LOCAL_HEADER_LIBRARIES := \
+    libsystem_headers \
+    libutils_headers
+LOCAL_EXPORT_HEADER_LIBRARY_HEADERS := \
+    libutils_headers
+
 LOCAL_C_INCLUDES += \
     $(LOCAL_PATH)/include \
     hardware/google/easel/camera/include
diff --git a/camera/libhdrplusclient/HdrPlusClientUtils.cpp b/camera/libhdrplusclient/HdrPlusClientUtils.cpp
index a48210f..1fb6838 100644
--- a/camera/libhdrplusclient/HdrPlusClientUtils.cpp
+++ b/camera/libhdrplusclient/HdrPlusClientUtils.cpp
@@ -56,6 +56,21 @@
             *b = std::min(std::max(yc + 1.772431f * uc - 0.006137f * vc, 0.0f), 255.0f);
             return OK;
         }
+        case HAL_PIXEL_FORMAT_RGB_888:
+        {
+            // Check the stream configuration has 1 plane.
+            if (streamConfig.image.planes.size() != 1) {
+                ALOGE("%s: RGB_888 should have 1 plane but it has %zu", __FUNCTION__,
+                        streamConfig.image.planes.size());
+                return BAD_VALUE;
+            }
+
+            uint32_t offset = y * streamConfig.image.planes[0].stride + x * 3;
+            *r = ((uint8_t*)buffer.data)[offset];
+            *g = ((uint8_t*)buffer.data)[offset + 1];
+            *b = ((uint8_t*)buffer.data)[offset + 2];
+            return OK;
+        }
         default:
             ALOGE("%s: Format %d is not supported.", __FUNCTION__, streamConfig.image.format);
             return BAD_VALUE;
@@ -64,7 +79,8 @@
 
 status_t writePpm(const std::string &filename, const pbcamera::StreamConfiguration &streamConfig,
         const pbcamera::StreamBuffer &buffer) {
-    if (streamConfig.image.format != HAL_PIXEL_FORMAT_YCrCb_420_SP) {
+    if (streamConfig.image.format != HAL_PIXEL_FORMAT_YCrCb_420_SP &&
+            streamConfig.image.format != HAL_PIXEL_FORMAT_RGB_888) {
         ALOGE("%s: format 0x%x is not supported.", __FUNCTION__, streamConfig.image.format);
         return BAD_VALUE;
     }
@@ -101,5 +117,81 @@
     return OK;
 }
 
+status_t comparePpm(const std::string &filename, const pbcamera::StreamConfiguration &streamConfig,
+        const pbcamera::StreamBuffer &buffer, float *diffRatio) {
+    if (streamConfig.image.format != HAL_PIXEL_FORMAT_YCrCb_420_SP) {
+        ALOGE("%s: format 0x%x is not supported.", __FUNCTION__, streamConfig.image.format);
+        return BAD_VALUE;
+    }
+
+    std::ifstream ifile(filename, std::ios::binary);
+    if (!ifile.is_open()) {
+        ALOGE("%s: Opening file (%s) failed.", __FUNCTION__, filename.data());
+        return NO_INIT;
+    }
+
+    std::string s;
+
+    // Read headers of the ppm file.
+    ifile >> s;
+    if (s != "P6") {
+        ALOGE("%s: Invalid PPM file header: %s", __FUNCTION__, s.c_str());
+        return BAD_VALUE;
+    }
+
+    // Read width and height.
+    ifile >> s;
+    uint32_t width = std::stoul(s);
+
+    ifile >> s;
+    uint32_t height = std::stoul(s);
+
+    if (width != streamConfig.image.width || height != streamConfig.image.height) {
+        ALOGE("%s: Image resolution doesn't match. image %dx%d ppm %dx%d",
+                __FUNCTION__, streamConfig.image.width, streamConfig.image.height,
+                width, height);
+        return BAD_VALUE;
+    }
+
+    ifile >> s;
+    if (s != "255") {
+        ALOGE("%s: Expecting 255 but got %s", __FUNCTION__, s.c_str());
+        return BAD_VALUE;
+    }
+
+    char c;
+
+    // Get a space
+    ifile.get(c);
+
+    // Now the RGB values start.
+    uint8_t r, g, b;
+    uint64_t diff = 0;
+
+    for (uint32_t y = 0; y < height; y++) {
+        for (uint32_t x = 0; x < width; x++) {
+            status_t res = getRgb(&r, &g, &b, x, y, streamConfig, buffer);
+            if (res != OK) {
+                ALOGE("%s: Getting RGB failed: %s (%d).", __FUNCTION__, strerror(-res), res);
+                return res;
+            }
+
+            // Get r, g, b from golden image and accumulate the differences.
+            ifile.get(c);
+            diff += abs(static_cast<int32_t>(c) - r);
+            ifile.get(c);
+            diff += abs(static_cast<int32_t>(c) - g);
+            ifile.get(c);
+            diff += abs(static_cast<int32_t>(c) - b);
+        }
+    }
+
+    if (diffRatio != nullptr) {
+        *diffRatio = diff / (static_cast<float>(width) * height * 3 * 256);
+    }
+
+    return OK;
+}
+
 } // hdrplus_client_utils
 } // namespace android
diff --git a/camera/libhdrplusclient/include/EaselManagerClient.h b/camera/libhdrplusclient/include/EaselManagerClient.h
index 3234f34..682cb67 100644
--- a/camera/libhdrplusclient/include/EaselManagerClient.h
+++ b/camera/libhdrplusclient/include/EaselManagerClient.h
@@ -21,6 +21,8 @@
 #include <utils/Errors.h>
 #include <utils/Mutex.h>
 
+#define FW_VER_SIZE 24
+
 namespace android {
 
 class EaselManagerClientListener;
@@ -65,6 +67,13 @@
     virtual status_t resume(EaselManagerClientListener *listener) = 0;
 
     /*
+     * Retrieve the Easel firmware version.
+     *
+     * The firmware version string is added to the image EXIF data.
+     */
+    virtual status_t getFwVersion(char *fwVersion) = 0;
+
+    /*
      * Start MIPI with an output pixel clock rate for a camera.
      *
      * Can be called when Easel is powered on or resumed, for Easel to start sending sensor data
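
A hedged sketch of how a camera HAL might call getFwVersion() for EXIF tagging. The assumption that the caller-supplied buffer must hold at least FW_VER_SIZE bytes comes from the define added above, not from the method comment:

    #include <string>
    #include "EaselManagerClient.h"

    // client is an already-initialized EaselManagerClient implementation.
    std::string readEaselFwVersion(android::EaselManagerClient &client) {
        char fwVersion[FW_VER_SIZE] = {};  // Assumed buffer size; see FW_VER_SIZE above.
        if (client.getFwVersion(fwVersion) != android::OK) {
            return std::string();  // Leave the EXIF tag empty if the query fails.
        }
        return std::string(fwVersion);
    }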
diff --git a/camera/libhdrplusclient/include/HdrPlusClient.h b/camera/libhdrplusclient/include/HdrPlusClient.h
index 962a5e8..40905cc 100644
--- a/camera/libhdrplusclient/include/HdrPlusClient.h
+++ b/camera/libhdrplusclient/include/HdrPlusClient.h
@@ -17,10 +17,12 @@
 #ifndef HDR_PLUS_CLIENT_H
 #define HDR_PLUS_CLIENT_H
 
+#include "CameraMetadata.h"
 #include "hardware/camera3.h"
 #include "HdrPlusClientListener.h"
 #include "HdrPlusTypes.h"
 
+using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
 namespace android {
 
 /**
@@ -31,7 +33,9 @@
  */
 class HdrPlusClient {
 public:
-    HdrPlusClient() {};
+    // HdrPlusClientListener is the listener that receives callbacks from the HDR+ client. The
+    // listener must remain valid for the lifetime of the HdrPlusClient.
+    HdrPlusClient(HdrPlusClientListener *) {};
     /*
      * The recommended way to create an HdrPlusClient instance is via
      * EaselManagerClient::openHdrPlusClientAsync() or EaselManagerClient::openHdrPlusClient().
@@ -45,17 +49,12 @@
      *
      * If EaselManagerClient is used to create the HdrPlusClient, it is already connected.
      *
-     * listener is the listener to receive callbacks from HDR+ client.
-     *
      * Returns:
      *  0:          on success.
      *  -EEXIST:    if it's already connected.
      *  -ENODEV:    if connecting failed due to a serious error.
      */
-    virtual status_t connect(HdrPlusClientListener *listener) = 0;
-
-    // Disconnect from HDR+ service.
-    virtual void disconnect() = 0;
+    virtual status_t connect() = 0;
 
     /*
      * Set the static metadata of current camera device.
@@ -111,12 +110,14 @@
      * buffer will be returned in CaptureResult only once.
      *
      * request is a CaptureRequest containing output buffers to be filled by HDR+ service.
+     * requestMetadata is the metadata for this request.
      *
      * Returns:
      *  0:              on success.
      *  -EINVAL:        if the request is invalid such as containing invalid stream IDs.
      */
-    virtual status_t submitCaptureRequest(pbcamera::CaptureRequest *request) = 0;
+    virtual status_t submitCaptureRequest(pbcamera::CaptureRequest *request,
+            const CameraMetadata &requestMetadata) = 0;
 
     /*
      * Send an input buffer to HDR+ service. This is used when HDR+ service's input buffers come
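
With this change the listener is supplied at construction time, connect() takes no arguments, and submitCaptureRequest() carries the per-request CameraMetadata. A minimal usage sketch; the helper name and error handling are illustrative:

    #include "HdrPlusClient.h"

    // client is an HdrPlusClient obtained through EaselManagerClient; request and
    // requestMetadata are assumed to have been populated by the HAL already.
    android::status_t submitHdrPlusRequest(android::HdrPlusClient &client,
            pbcamera::CaptureRequest *request, const CameraMetadata &requestMetadata) {
        // connect() reports -EEXIST when the client is already connected, which is
        // expected when the client was created via EaselManagerClient.
        android::status_t res = client.connect();
        if (res != android::OK && res != -EEXIST) {
            return res;
        }
        return client.submitCaptureRequest(request, requestMetadata);
    }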
diff --git a/camera/libhdrplusclient/include/HdrPlusClientListener.h b/camera/libhdrplusclient/include/HdrPlusClientListener.h
index 6531c9b..30b817e 100644
--- a/camera/libhdrplusclient/include/HdrPlusClientListener.h
+++ b/camera/libhdrplusclient/include/HdrPlusClientListener.h
@@ -69,6 +69,23 @@
      * will contain distinct output buffers that have not been received yet.
      */
     virtual void onFailedCaptureResult(pbcamera::CaptureResult *failedResult) = 0;
+
+    /*
+     * Invoked when HDR+ processing has started for a request. requestId is the ID of the request.
+     * apSensorTimestampNs is the AP sensor timestamp of the base frame, in nanoseconds.
+     */
+    virtual void onShutter(uint32_t requestId, int64_t apSensorTimestampNs) = 0;
+
+    /*
+     * Invoked when Easel is ready to take another HDR+ request.
+     */
+    virtual void onNextCaptureReady(uint32_t requestId) = 0;
+
+    /*
+     * Invoked when the postview for a request is ready.
+     */
+    virtual void onPostview(uint32_t requestId, std::unique_ptr<std::vector<uint8_t>> postview,
+            uint32_t width, uint32_t height, uint32_t stride, int32_t format) = 0;
 };
 
 } // namespace android
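
The three callbacks added here deliver the shutter timestamp of the base frame, a back-pressure signal that Easel can accept another request, and the postview image. A sketch of a listener override; the class name and comment bodies are illustrative, only the signatures come from this header:

    #include <memory>
    #include <vector>
    #include "HdrPlusClientListener.h"

    class MyHdrPlusListener : public android::HdrPlusClientListener {
    public:
        // Overrides of the pre-existing pure virtual callbacks (e.g. onFailedCaptureResult)
        // are elided for brevity.

        void onShutter(uint32_t requestId, int64_t apSensorTimestampNs) override {
            // Forward the shutter notification for this HDR+ request to the framework.
        }

        void onNextCaptureReady(uint32_t requestId) override {
            // Easel can take another HDR+ request; dequeue any pending request now.
        }

        void onPostview(uint32_t requestId, std::unique_ptr<std::vector<uint8_t>> postview,
                uint32_t width, uint32_t height, uint32_t stride, int32_t format) override {
            // Hand the postview buffer to the application layer.
        }
    };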
diff --git a/camera/libhdrplusclient/include/HdrPlusClientUtils.h b/camera/libhdrplusclient/include/HdrPlusClientUtils.h
index f56f380..f9b126f 100644
--- a/camera/libhdrplusclient/include/HdrPlusClientUtils.h
+++ b/camera/libhdrplusclient/include/HdrPlusClientUtils.h
@@ -39,6 +39,24 @@
 status_t writePpm(const std::string &filename, const pbcamera::StreamConfiguration &streamConfig,
         const pbcamera::StreamBuffer &buffer);
 
+/*
+ * Compare the image buffer against a golden .ppm file.
+ *
+ * filename is the filename of the .ppm file and should include ".ppm" at the end.
+ * streamConfig is the stream configuration of the buffer.
+ * buffer is the buffer to be compared.
+ * diffRatio will be the difference ratio between the image buffer and the golden ppm file.
+ *           It's calculated as sum(R, G, B diffs in each pixel) / (width * height * 256 * 3)
+ *
+ * Returns:
+ *  OK:             if the comparison completed successfully.
+ *  BAD_VALUE:      if the format is not supported or the stream configuration is invalid, or the
+ *                  file cannot be parsed correctly.
+ *  NO_INIT:        if it failed to open the file.
+ */
+status_t comparePpm(const std::string &filename, const pbcamera::StreamConfiguration &streamConfig,
+        const pbcamera::StreamBuffer &buffer, float *diffRatio);
+
 } // hdrplus_client_utils
 } // namespace android
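
For scale, a single-pixel buffer whose R, G and B values each differ from the golden file by 2 gives diffRatio = (2 + 2 + 2) / (1 * 1 * 3 * 256) ≈ 0.0078, so thresholds on the order of 0.01 are plausible. A hedged usage sketch; the golden-file path and threshold are illustrative, not values defined by this library:

    #include <string>
    #include "HdrPlusClientUtils.h"

    // Returns true when the captured buffer is close enough to the golden image.
    bool matchesGolden(const pbcamera::StreamConfiguration &streamConfig,
            const pbcamera::StreamBuffer &buffer) {
        float diffRatio = 0.0f;
        android::status_t res = android::hdrplus_client_utils::comparePpm(
                "/data/local/tmp/golden.ppm", streamConfig, buffer, &diffRatio);
        // Accept only a successful comparison with at most ~1% average per-channel difference.
        return res == android::OK && diffRatio < 0.01f;
    }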