QCamera3: Adding Urgent Metadata

Adding urgent metadata to report 3A states as quickly
as possible.

Uses HAL3.1 quirk field for partial results.

Bug: 11115603

Change-Id: I6cdb65dd7cb6d23ef9de244cbb55ebc1a653590f
diff --git a/QCamera2/HAL3/QCamera3HWI.cpp b/QCamera2/HAL3/QCamera3HWI.cpp
index 70a6e41..c91c434 100644
--- a/QCamera2/HAL3/QCamera3HWI.cpp
+++ b/QCamera2/HAL3/QCamera3HWI.cpp
@@ -976,40 +976,81 @@
     cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
         POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);
 
+    int32_t urgent_frame_number_valid = *(int32_t *)
+        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
+    uint32_t urgent_frame_number = *(uint32_t *)
+        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
+
+    if (urgent_frame_number_valid) {
+        ALOGV("%s: valid urgent frame_number = %d, capture_time = %lld",
+          __func__, urgent_frame_number, capture_time);
+
+        //Received an urgent Frame Number, handle it
+        //using HAL3.1 quirk for partial results
+        for (List<PendingRequestInfo>::iterator i =
+            mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
+            camera3_notify_msg_t notify_msg;
+            ALOGV("%s: Iterator Frame = %d urgent frame = %d",
+                __func__, i->frame_number, urgent_frame_number);
+
+            if (i->frame_number < urgent_frame_number &&
+                i->bNotified == 0) {
+                notify_msg.type = CAMERA3_MSG_SHUTTER;
+                notify_msg.message.shutter.frame_number = i->frame_number;
+                notify_msg.message.shutter.timestamp = capture_time -
+                    (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
+                mCallbackOps->notify(mCallbackOps, &notify_msg);
+                i->timestamp = notify_msg.message.shutter.timestamp;
+                i->bNotified = 1;
+                ALOGV("%s: Dummy notification !!!! notify frame_number = %d, capture_time = %lld",
+                    __func__, i->frame_number, notify_msg.message.shutter.timestamp);
+            }
+
+            if (i->frame_number == urgent_frame_number) {
+
+                camera3_capture_result_t result;
+
+                // Send shutter notify to frameworks
+                notify_msg.type = CAMERA3_MSG_SHUTTER;
+                notify_msg.message.shutter.frame_number = i->frame_number;
+                notify_msg.message.shutter.timestamp = capture_time;
+                mCallbackOps->notify(mCallbackOps, &notify_msg);
+
+                i->timestamp = capture_time;
+                i->bNotified = 1;
+
+                // Extract 3A metadata
+                result.result =
+                    translateCbUrgentMetadataToResultMetadata(metadata);
+                // Populate metadata result
+                result.frame_number = urgent_frame_number;
+                result.num_output_buffers = 0;
+                result.output_buffers = NULL;
+                mCallbackOps->process_capture_result(mCallbackOps, &result);
+                ALOGV("%s: urgent frame_number = %d, capture_time = %lld",
+                     __func__, result.frame_number, capture_time);
+                free_camera_metadata((camera_metadata_t *)result.result);
+                break;
+            }
+        }
+    }
+
     if (!frame_number_valid) {
-        ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
+        ALOGV("%s: Not a valid normal frame number, used as SOF only", __func__);
         mMetadataChannel->bufDone(metadata_buf);
         free(metadata_buf);
         goto done_metadata;
     }
-    ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
+    ALOGV("%s: valid normal frame_number = %d, capture_time = %lld", __func__,
             frame_number, capture_time);
 
     // Go through the pending requests info and send shutter/results to frameworks
     for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
         i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
         camera3_capture_result_t result;
-        camera3_notify_msg_t notify_msg;
         ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
 
         // Flush out all entries with less or equal frame numbers.
-
-        //TODO: Make sure shutter timestamp really reflects shutter timestamp.
-        //Right now it's the same as metadata timestamp
-
-        //TODO: When there is metadata drop, how do we derive the timestamp of
-        //dropped frames? For now, we fake the dropped timestamp by substracting
-        //from the reported timestamp
-        nsecs_t current_capture_time = capture_time -
-            (frame_number - i->frame_number) * NSEC_PER_33MSEC;
-
-        // Send shutter notify to frameworks
-        notify_msg.type = CAMERA3_MSG_SHUTTER;
-        notify_msg.message.shutter.frame_number = i->frame_number;
-        notify_msg.message.shutter.timestamp = current_capture_time;
-        mCallbackOps->notify(mCallbackOps, &notify_msg);
-        ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
-                i->frame_number, capture_time);
         mPendingRequest--;
 
         // Check whether any stream buffer corresponding to this is dropped or not
@@ -1045,13 +1086,13 @@
         if (i->frame_number < frame_number) {
             CameraMetadata dummyMetadata;
             dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
-                    &current_capture_time, 1);
+                    &i->timestamp, 1);
             dummyMetadata.update(ANDROID_REQUEST_ID,
                     &(i->request_id), 1);
             result.result = dummyMetadata.release();
         } else {
             result.result = translateCbMetadataToResultMetadata(metadata,
-                    current_capture_time, i->request_id, i->blob_request,
+                    i->timestamp, i->request_id, i->blob_request,
                     &(i->input_jpeg_settings));
             if (mIsZslMode) {
                 int found_metadata = 0;
@@ -1149,13 +1190,13 @@
 
             mCallbackOps->process_capture_result(mCallbackOps, &result);
             ALOGV("%s: meta frame_number = %d, capture_time = %lld",
-                    __func__, result.frame_number, current_capture_time);
+                    __func__, result.frame_number, i->timestamp);
             free_camera_metadata((camera_metadata_t *)result.result);
             delete[] result_buffers;
         } else {
             mCallbackOps->process_capture_result(mCallbackOps, &result);
             ALOGV("%s: meta frame_number = %d, capture_time = %lld",
-                        __func__, result.frame_number, current_capture_time);
+                        __func__, result.frame_number, i->timestamp);
             free_camera_metadata((camera_metadata_t *)result.result);
         }
         // erase the element from the list
@@ -1474,6 +1515,7 @@
     pendingRequest.num_buffers = request->num_output_buffers;
     pendingRequest.request_id = request_id;
     pendingRequest.blob_request = blob_request;
+    pendingRequest.bNotified = 0;
     if (blob_request)
         pendingRequest.input_jpeg_settings = *mJpegSettings;
     pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
@@ -1785,83 +1827,23 @@
              camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
              break;
           }
-         case CAM_INTF_META_AEC_PRECAPTURE_ID: {
-             int32_t  *ae_precapture_id =
-                     (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
-             camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
-             break;
-          }
-         case CAM_INTF_META_AEC_ROI: {
-            cam_area_t  *hAeRegions =
-                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
-             int32_t aeRegions[5];
-             convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
-             camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
-             break;
-          }
-          case CAM_INTF_META_AEC_STATE:{
-             uint8_t *ae_state =
-                  (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
-             camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
-             break;
-          }
-          case CAM_INTF_PARM_FOCUS_MODE:{
-             uint8_t  *focusMode =
-                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
-             uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
-                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
-             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
-             break;
-          }
-          case CAM_INTF_META_AF_ROI:{
-             /*af regions*/
-             cam_area_t  *hAfRegions =
-                  (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
-             int32_t afRegions[5];
-             convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
-             camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
-             break;
-          }
-          case CAM_INTF_META_AF_STATE: {
-             uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
-             camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
-             break;
-          }
-          case CAM_INTF_META_AF_TRIGGER_ID: {
-             int32_t  *afTriggerId =
-                  (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
-             camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
-             break;
-          }
-          case CAM_INTF_PARM_WHITE_BALANCE: {
-               uint8_t  *whiteBalance =
-                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
-               uint8_t fwkWhiteBalanceMode = lookupFwkName(WHITE_BALANCE_MODES_MAP,
-                   sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
-                   *whiteBalance);
-               camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
-               break;
-          }
-          case CAM_INTF_META_AWB_REGIONS: {
-             /*awb regions*/
-             cam_area_t  *hAwbRegions =
-                (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
-             int32_t awbRegions[5];
-             convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
-             camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
-             break;
-          }
-          case CAM_INTF_META_AWB_STATE: {
-             uint8_t  *whiteBalanceState =
-                (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
-             camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
-             break;
-          }
-          case CAM_INTF_META_MODE: {
-             uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
-             camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
-             break;
-          }
+
+         // 3A state is sent in urgent partial result (uses quirk)
+         case CAM_INTF_META_AEC_PRECAPTURE_ID:
+         case CAM_INTF_META_AEC_ROI:
+         case CAM_INTF_META_AEC_STATE:
+         case CAM_INTF_PARM_FOCUS_MODE:
+         case CAM_INTF_META_AF_ROI:
+         case CAM_INTF_META_AF_STATE:
+         case CAM_INTF_META_AF_TRIGGER_ID:
+         case CAM_INTF_PARM_WHITE_BALANCE:
+         case CAM_INTF_META_AWB_REGIONS:
+         case CAM_INTF_META_AWB_STATE:
+         case CAM_INTF_META_MODE: {
+           ALOGV("%s: 3A metadata: %d, do not process", __func__, curr_entry);
+           break;
+         }
+
           case CAM_INTF_META_EDGE_MODE: {
              cam_edge_application_t  *edgeApplication =
                 (cam_edge_application_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
@@ -2115,6 +2097,134 @@
 }
 
 /*===========================================================================
+ * FUNCTION   : translateCbUrgentMetadataToResultMetadata
+ *
+ * DESCRIPTION:
+ *
+ * PARAMETERS :
+ *   @metadata : metadata information from callback
+ *
+ * RETURN     : camera_metadata_t*
+ *              metadata in a format specified by fwk
+ *==========================================================================*/
+camera_metadata_t*
+QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
+                                (metadata_buffer_t *metadata) {
+
+    CameraMetadata camMetadata;
+    camera_metadata_t* resultMetadata;
+
+    uint8_t partial_result_tag = ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
+    camMetadata.update(ANDROID_QUIRKS_PARTIAL_RESULT, &partial_result_tag, 1);
+
+    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
+    uint8_t next_entry;
+    while (curr_entry != CAM_INTF_PARM_MAX) {
+      switch (curr_entry) {
+        case CAM_INTF_META_AEC_PRECAPTURE_ID: {
+            int32_t  *ae_precapture_id =
+              (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
+            camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
+                                          ae_precapture_id, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID", __func__);
+          break;
+        }
+        case CAM_INTF_META_AEC_ROI: {
+            cam_area_t  *hAeRegions =
+                (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
+            int32_t aeRegions[5];
+            convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
+            camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_REGIONS", __func__);
+            break;
+        }
+        case CAM_INTF_META_AEC_STATE:{
+            uint8_t *ae_state =
+                (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
+            camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE", __func__);
+            break;
+        }
+        case CAM_INTF_PARM_FOCUS_MODE:{
+            uint8_t  *focusMode =
+                (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
+            uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
+               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
+            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
+            break;
+        }
+        case CAM_INTF_META_AF_ROI:{
+            /*af regions*/
+            cam_area_t  *hAfRegions =
+                (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
+            int32_t afRegions[5];
+            convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
+            camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_REGIONS", __func__);
+            break;
+        }
+        case CAM_INTF_META_AF_STATE: {
+            uint8_t  *afState =
+               (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
+            camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE", __func__);
+            break;
+        }
+        case CAM_INTF_META_AF_TRIGGER_ID: {
+            int32_t  *afTriggerId =
+                 (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
+            camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID", __func__);
+            break;
+        }
+        case CAM_INTF_PARM_WHITE_BALANCE: {
+           uint8_t  *whiteBalance =
+                (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
+             uint8_t fwkWhiteBalanceMode =
+                    lookupFwkName(WHITE_BALANCE_MODES_MAP,
+                    sizeof(WHITE_BALANCE_MODES_MAP)/
+                    sizeof(WHITE_BALANCE_MODES_MAP[0]), *whiteBalance);
+             camMetadata.update(ANDROID_CONTROL_AWB_MODE,
+                 &fwkWhiteBalanceMode, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE", __func__);
+             break;
+        }
+        case CAM_INTF_META_AWB_REGIONS: {
+           /*awb regions*/
+           cam_area_t  *hAwbRegions =
+               (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
+           int32_t awbRegions[5];
+           convertToRegions(hAwbRegions->rect, awbRegions,hAwbRegions->weight);
+           camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
+           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_REGIONS", __func__);
+           break;
+        }
+        case CAM_INTF_META_AWB_STATE: {
+           uint8_t  *whiteBalanceState =
+              (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
+           camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
+           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE", __func__);
+           break;
+        }
+        case CAM_INTF_META_MODE: {
+            uint8_t *mode =(uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
+            camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_MODE", __func__);
+            break;
+        }
+        default:
+            ALOGV("%s: Normal Metadata %d, do not process",
+              __func__, curr_entry);
+       }
+       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
+       curr_entry = next_entry;
+    }
+    resultMetadata = camMetadata.release();
+    return resultMetadata;
+}
+
+/*===========================================================================
  * FUNCTION   : convertToRegions
  *
  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
@@ -2573,6 +2683,11 @@
     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                       availableVstabModes, sizeof(availableVstabModes));
 
+    /** Quirk for urgent 3A state until final interface is worked out */
+    uint8_t usePartialResultQuirk = 1;
+    staticInfo.update(ANDROID_QUIRKS_USE_PARTIAL_RESULT,
+                      &usePartialResultQuirk, 1);
+
     /*HAL 1 and HAL 3 common*/
     float maxZoom = 4;
     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
diff --git a/QCamera2/HAL3/QCamera3HWI.h b/QCamera2/HAL3/QCamera3HWI.h
index cc4cc61..fdba544 100755
--- a/QCamera2/HAL3/QCamera3HWI.h
+++ b/QCamera2/HAL3/QCamera3HWI.h
@@ -125,6 +125,9 @@
 
     int setFrameParameters(camera3_capture_request_t *request, uint32_t streamTypeMask);
     int translateMetadataToParameters(const camera3_capture_request_t *request);
+    camera_metadata_t* translateCbUrgentMetadataToResultMetadata (
+                             metadata_buffer_t *metadata);
+
     camera_metadata_t* translateCbMetadataToResultMetadata(metadata_buffer_t *metadata,
                             nsecs_t timestamp, int32_t request_id, int32_t BlobRequest,
                             jpeg_settings_t* InputJpegSettings);
@@ -210,6 +213,8 @@
         List<RequestedBufferInfo> buffers;
         int blob_request;
         jpeg_settings_t input_jpeg_settings;
+        nsecs_t timestamp;
+        uint8_t bNotified;
         int input_buffer_present;
     } PendingRequestInfo;
     typedef struct {
diff --git a/QCamera2/stack/common/cam_intf.h b/QCamera2/stack/common/cam_intf.h
index 9753f44..7f5c75e 100755
--- a/QCamera2/stack/common/cam_intf.h
+++ b/QCamera2/stack/common/cam_intf.h
@@ -497,9 +497,11 @@
     INCLUDE(CAM_INTF_META_GOOD_FRAME_IDX_RANGE,         cam_frame_idx_range_t,       1);
     /* Specific to HAL3 */
     INCLUDE(CAM_INTF_META_FRAME_NUMBER_VALID,           int32_t,                     1);
+    INCLUDE(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID,    int32_t,                     1);
     INCLUDE(CAM_INTF_META_FRAME_DROPPED,                cam_frame_dropped_t,         1);
     INCLUDE(CAM_INTF_META_PENDING_REQUESTS,             uint32_t,                    1);
     INCLUDE(CAM_INTF_META_FRAME_NUMBER,                 uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_URGENT_FRAME_NUMBER,          uint32_t,                    1);
     INCLUDE(CAM_INTF_META_COLOR_CORRECT_MODE,           uint8_t,                     1);
     INCLUDE(CAM_INTF_META_COLOR_CORRECT_TRANSFORM,      cam_color_correct_matrix_t,  1);
     INCLUDE(CAM_INTF_META_COLOR_CORRECT_GAINS,          cam_color_correct_gains_t,   1);
diff --git a/QCamera2/stack/common/cam_types.h b/QCamera2/stack/common/cam_types.h
index 6e82822..60c1941 100755
--- a/QCamera2/stack/common/cam_types.h
+++ b/QCamera2/stack/common/cam_types.h
@@ -885,6 +885,8 @@
     /* specific to HAL3 */
     /* Whether the metadata maps to a valid frame number */
     CAM_INTF_META_FRAME_NUMBER_VALID,
+    /* Whether the urgent metadata maps to a valid frame number */
+    CAM_INTF_META_URGENT_FRAME_NUMBER_VALID,
     /* Whether the stream buffer corresponding this frame is dropped or not */
     CAM_INTF_META_FRAME_DROPPED,
     /* Number of pending requests yet to be processed */
@@ -905,6 +907,9 @@
     /* A frame counter set by the framework. Must be maintained unchanged in
      * output frame. */
     CAM_INTF_META_FRAME_NUMBER,
+    /* A frame counter set by the framework. Must be maintained unchanged in
+     * output frame. */
+    CAM_INTF_META_URGENT_FRAME_NUMBER,
     /*Number of streams and size of streams in current configuration*/
     CAM_INTF_META_STREAM_INFO,
     /* List of areas to use for metering */