Merge "Camera3: Query camera during initialization" into klp-dev
diff --git a/QCamera2/HAL3/QCamera3Channel.cpp b/QCamera2/HAL3/QCamera3Channel.cpp
index 4b303ba..3ec5834 100755
--- a/QCamera2/HAL3/QCamera3Channel.cpp
+++ b/QCamera2/HAL3/QCamera3Channel.cpp
@@ -703,7 +703,7 @@
     mMemory = NULL;
 }
 
-int QCamera3RegularChannel::kMaxBuffers = 4;
+int QCamera3RegularChannel::kMaxBuffers = 7;
 
 QCamera3MetadataChannel::QCamera3MetadataChannel(uint32_t cam_handle,
                     mm_camera_ops_t *cam_ops,
diff --git a/QCamera2/HAL3/QCamera3HWI.cpp b/QCamera2/HAL3/QCamera3HWI.cpp
index 5e2b0ac..1f06a67 100644
--- a/QCamera2/HAL3/QCamera3HWI.cpp
+++ b/QCamera2/HAL3/QCamera3HWI.cpp
@@ -151,6 +151,7 @@
     reserved:                           {0},
 };
 
+int QCamera3HardwareInterface::kMaxInFlight = 5;
 
 /*===========================================================================
  * FUNCTION   : QCamera3HardwareInterface
@@ -922,6 +923,340 @@
 }
 
 /*===========================================================================
+ * FUNCTION   : handleMetadataWithLock
+ *
+ * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
+ *
+ * PARAMETERS : @metadata_buf: metadata buffer
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::handleMetadataWithLock(
+    mm_camera_super_buf_t *metadata_buf)
+{
+    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
+    int32_t frame_number_valid = *(int32_t *)
+        POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
+    uint32_t pending_requests = *(uint32_t *)POINTER_OF(
+        CAM_INTF_META_PENDING_REQUESTS, metadata);
+    uint32_t frame_number = *(uint32_t *)
+        POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
+    const struct timeval *tv = (const struct timeval *)
+        POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
+    nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
+        tv->tv_usec * NSEC_PER_USEC;
+    cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
+        POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);
+
+    if (!frame_number_valid) {
+        ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
+        mMetadataChannel->bufDone(metadata_buf);
+        free(metadata_buf);
+        goto done_metadata;
+    }
+    ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
+            frame_number, capture_time);
+
+    // Go through the pending requests info and send shutter/results to frameworks
+    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
+        i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
+        camera3_capture_result_t result;
+        camera3_notify_msg_t notify_msg;
+        ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
+
+        // Flush out all entries with less or equal frame numbers.
+
+        //TODO: Make sure shutter timestamp really reflects shutter timestamp.
+        //Right now it's the same as metadata timestamp
+
+        //TODO: When there is metadata drop, how do we derive the timestamp of
+        //dropped frames? For now, we fake the dropped timestamp by subtracting
+        //from the reported timestamp
+        nsecs_t current_capture_time = capture_time -
+            (frame_number - i->frame_number) * NSEC_PER_33MSEC;
+
+        // Send shutter notify to frameworks
+        notify_msg.type = CAMERA3_MSG_SHUTTER;
+        notify_msg.message.shutter.frame_number = i->frame_number;
+        notify_msg.message.shutter.timestamp = current_capture_time;
+        mCallbackOps->notify(mCallbackOps, &notify_msg);
+        ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
+                i->frame_number, capture_time);
+        mPendingRequest--;
+
+        // Check whether any stream buffer corresponding to this is dropped or not
+        // If dropped, then send the ERROR_BUFFER for the corresponding stream
+        if (cam_frame_drop.frame_dropped) {
+            camera3_notify_msg_t notify_msg;
+            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
+                    j != i->buffers.end(); j++) {
+                QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
+                uint32_t streamTypeMask = channel->getStreamTypeMask();
+                if (streamTypeMask & cam_frame_drop.stream_type_mask) {
+                    // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
+                    ALOGV("%s: Start of reporting error frame#=%d, streamMask=%d",
+                           __func__, i->frame_number, streamTypeMask);
+                    notify_msg.type = CAMERA3_MSG_ERROR;
+                    notify_msg.message.error.frame_number = i->frame_number;
+                    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
+                    notify_msg.message.error.error_stream = j->stream;
+                    mCallbackOps->notify(mCallbackOps, &notify_msg);
+                    ALOGV("%s: End of reporting error frame#=%d, streamMask=%d",
+                           __func__, i->frame_number, streamTypeMask);
+                    PendingFrameDropInfo PendingFrameDrop;
+                    PendingFrameDrop.frame_number=i->frame_number;
+                    PendingFrameDrop.stream_type_mask = cam_frame_drop.stream_type_mask;
+                    // Add the Frame drop info to mPendingFrameDropList
+                    mPendingFrameDropList.push_back(PendingFrameDrop);
+                }
+            }
+        }
+
+        // Send empty metadata with already filled buffers for dropped metadata
+        // and send valid metadata with already filled buffers for current metadata
+        if (i->frame_number < frame_number) {
+            CameraMetadata dummyMetadata;
+            dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
+                    &current_capture_time, 1);
+            dummyMetadata.update(ANDROID_REQUEST_ID,
+                    &(i->request_id), 1);
+            result.result = dummyMetadata.release();
+        } else {
+            result.result = translateCbMetadataToResultMetadata(metadata,
+                    current_capture_time, i->request_id, i->blob_request,
+                    &(i->input_jpeg_settings));
+            if (mIsZslMode) {
+                int found_metadata = 0;
+                //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
+                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
+                    j != i->buffers.end(); j++) {
+                    if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
+                        //check if corresp. zsl already exists in the stored metadata list
+                        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
+                                m != mStoredMetadataList.end(); m++) {
+                            if (m->frame_number == frame_number) {
+                                m->meta_buf = metadata_buf;
+                                found_metadata = 1;
+                                break;
+                            }
+                        }
+                        if (!found_metadata) {
+                            MetadataBufferInfo store_meta_info;
+                            store_meta_info.meta_buf = metadata_buf;
+                            store_meta_info.frame_number = frame_number;
+                            mStoredMetadataList.push_back(store_meta_info);
+                            found_metadata = 1;
+                        }
+                    }
+                }
+                if (!found_metadata) {
+                    if (!i->input_buffer_present && i->blob_request) {
+                        //livesnapshot or fallback non-zsl snapshot case
+                        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
+                                j != i->buffers.end(); j++){
+                            if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
+                                j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
+                                mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
+                                break;
+                            }
+                        }
+                    } else {
+                        //return the metadata immediately
+                        mMetadataChannel->bufDone(metadata_buf);
+                        free(metadata_buf);
+                    }
+                }
+            } else if (!mIsZslMode && i->blob_request) {
+                //If it is a blob request then send the metadata to the picture channel
+                mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
+            } else {
+                // Return metadata buffer
+                mMetadataChannel->bufDone(metadata_buf);
+                free(metadata_buf);
+            }
+        }
+        if (!result.result) {
+            ALOGE("%s: metadata is NULL", __func__);
+        }
+        result.frame_number = i->frame_number;
+        result.num_output_buffers = 0;
+        result.output_buffers = NULL;
+        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
+                    j != i->buffers.end(); j++) {
+            if (j->buffer) {
+                result.num_output_buffers++;
+            }
+        }
+
+        if (result.num_output_buffers > 0) {
+            camera3_stream_buffer_t *result_buffers =
+                new camera3_stream_buffer_t[result.num_output_buffers];
+            if (!result_buffers) {
+                ALOGE("%s: Fatal error: out of memory", __func__);
+            }
+            size_t result_buffers_idx = 0;
+            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
+                    j != i->buffers.end(); j++) {
+                if (j->buffer) {
+                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
+                            m != mPendingFrameDropList.end(); m++) {
+                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
+                        uint32_t streamTypeMask = channel->getStreamTypeMask();
+                        if((m->stream_type_mask & streamTypeMask) &&
+                                (m->frame_number==frame_number)) {
+                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
+                            ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamTypeMask=%d",
+                                  __func__, frame_number, streamTypeMask);
+                            m = mPendingFrameDropList.erase(m);
+                            break;
+                        }
+                    }
+                    result_buffers[result_buffers_idx++] = *(j->buffer);
+                    free(j->buffer);
+                    j->buffer = NULL;
+                    mPendingBuffersMap.editValueFor(j->stream)--;
+                }
+            }
+            result.output_buffers = result_buffers;
+
+            mCallbackOps->process_capture_result(mCallbackOps, &result);
+            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
+                    __func__, result.frame_number, current_capture_time);
+            free_camera_metadata((camera_metadata_t *)result.result);
+            delete[] result_buffers;
+        } else {
+            mCallbackOps->process_capture_result(mCallbackOps, &result);
+            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
+                        __func__, result.frame_number, current_capture_time);
+            free_camera_metadata((camera_metadata_t *)result.result);
+        }
+        // erase the element from the list
+        i = mPendingRequestsList.erase(i);
+    }
+
+done_metadata:
+    if (!pending_requests)
+        unblockRequestIfNecessary();
+
+}
+
+/*===========================================================================
+ * FUNCTION   : handleBufferWithLock
+ *
+ * DESCRIPTION: Handles image buffer callback with mMutex lock held.
+ *
+ * PARAMETERS : @buffer: image buffer for the callback
+ *              @frame_number: frame number of the image buffer
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::handleBufferWithLock(
+    camera3_stream_buffer_t *buffer, uint32_t frame_number)
+{
+    // If the frame number doesn't exist in the pending request list,
+    // directly send the buffer to the frameworks, and update pending buffers map
+    // Otherwise, book-keep the buffer.
+    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
+    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
+        i++;
+    }
+    if (i == mPendingRequestsList.end()) {
+        // Verify all pending requests frame_numbers are greater
+        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
+                j != mPendingRequestsList.end(); j++) {
+            if (j->frame_number < frame_number) {
+                ALOGE("%s: Error: pending frame number %d is smaller than %d",
+                        __func__, j->frame_number, frame_number);
+            }
+        }
+        camera3_capture_result_t result;
+        result.result = NULL;
+        result.frame_number = frame_number;
+        result.num_output_buffers = 1;
+        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
+                m != mPendingFrameDropList.end(); m++) {
+            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
+            uint32_t streamTypeMask = channel->getStreamTypeMask();
+            if((m->stream_type_mask & streamTypeMask) &&
+                (m->frame_number==frame_number) ) {
+                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
+                ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamTypeMask=%d",
+                        __func__, frame_number, streamTypeMask);
+                m = mPendingFrameDropList.erase(m);
+                break;
+            }
+        }
+        result.output_buffers = buffer;
+        ALOGV("%s: result frame_number = %d, buffer = %p",
+                __func__, frame_number, buffer);
+        mPendingBuffersMap.editValueFor(buffer->stream)--;
+        if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
+            int found = 0;
+            for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
+                k != mStoredMetadataList.end(); k++) {
+                if (k->frame_number == frame_number) {
+                    k->zsl_buf_hdl = buffer->buffer;
+                    found = 1;
+                    break;
+                }
+            }
+            if (!found) {
+                MetadataBufferInfo meta_info;
+                meta_info.frame_number = frame_number;
+                meta_info.zsl_buf_hdl = buffer->buffer;
+                mStoredMetadataList.push_back(meta_info);
+            }
+        }
+        mCallbackOps->process_capture_result(mCallbackOps, &result);
+    } else {
+        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
+                j != i->buffers.end(); j++) {
+            if (j->stream == buffer->stream) {
+                if (j->buffer != NULL) {
+                    ALOGE("%s: Error: buffer is already set", __func__);
+                } else {
+                    j->buffer = (camera3_stream_buffer_t *)malloc(
+                            sizeof(camera3_stream_buffer_t));
+                    *(j->buffer) = *buffer;
+                    ALOGV("%s: cache buffer %p at result frame_number %d",
+                            __func__, buffer, frame_number);
+                }
+            }
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : unblockRequestIfNecessary
+ *
+ * DESCRIPTION: Unblock process_capture_request if max_buffers has not been reached
+ *              for any stream. Note that mMutex must be held when this function is called.
+ *
+ * PARAMETERS :
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::unblockRequestIfNecessary()
+{
+    bool max_buffers_dequeued = false;
+    for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
+        const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
+        uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
+        if (queued_buffers == stream->max_buffers) {
+            max_buffers_dequeued = true;
+            break;
+        }
+    }
+    if (!max_buffers_dequeued) {
+        // Unblock process_capture_request
+        pthread_cond_signal(&mRequestCond);
+    }
+}
+
+/*===========================================================================
  * FUNCTION   : registerStreamBuffers
  *
  * DESCRIPTION: Register buffers for a given stream with the HAL device.
@@ -1209,8 +1544,8 @@
         ts.tv_sec += 5;
     }
     //Block on conditional variable
-    mPendingRequest = 1;
-    while (mPendingRequest == 1) {
+    mPendingRequest++;
+    do {
         if (!isValidTimeout) {
             ALOGV("%s: Blocking on conditional wait", __func__);
             pthread_cond_wait(&mRequestCond, &mMutex);
@@ -1225,7 +1560,7 @@
             }
         }
         ALOGV("%s: Unblocked", __func__);
-    }
+    } while (mPendingRequest >= kMaxInFlight);
 
     pthread_mutex_unlock(&mMutex);
 
@@ -1314,296 +1649,11 @@
 {
     pthread_mutex_lock(&mMutex);
 
-    if (metadata_buf) {
-        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
-        int32_t frame_number_valid = *(int32_t *)
-            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
-        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
-            CAM_INTF_META_PENDING_REQUESTS, metadata);
-        uint32_t frame_number = *(uint32_t *)
-            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
-        const struct timeval *tv = (const struct timeval *)
-            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
-        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
-            tv->tv_usec * NSEC_PER_USEC;
-        cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
-            POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);
+    if (metadata_buf)
+        handleMetadataWithLock(metadata_buf);
+    else
+        handleBufferWithLock(buffer, frame_number);
 
-        if (!frame_number_valid) {
-            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
-            mMetadataChannel->bufDone(metadata_buf);
-            free(metadata_buf);
-            goto done_metadata;
-        }
-        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
-                frame_number, capture_time);
-
-        // Go through the pending requests info and send shutter/results to frameworks
-        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
-                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
-            camera3_capture_result_t result;
-            camera3_notify_msg_t notify_msg;
-            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
-
-            // Flush out all entries with less or equal frame numbers.
-
-            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
-            //Right now it's the same as metadata timestamp
-
-            //TODO: When there is metadata drop, how do we derive the timestamp of
-            //dropped frames? For now, we fake the dropped timestamp by substracting
-            //from the reported timestamp
-            nsecs_t current_capture_time = capture_time -
-                (frame_number - i->frame_number) * NSEC_PER_33MSEC;
-
-            // Send shutter notify to frameworks
-            notify_msg.type = CAMERA3_MSG_SHUTTER;
-            notify_msg.message.shutter.frame_number = i->frame_number;
-            notify_msg.message.shutter.timestamp = current_capture_time;
-            mCallbackOps->notify(mCallbackOps, &notify_msg);
-            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
-                    i->frame_number, capture_time);
-
-            // Check whether any stream buffer corresponding to this is dropped or not
-            // If dropped, then send the ERROR_BUFFER for the corresponding stream
-            if (cam_frame_drop.frame_dropped) {
-                camera3_notify_msg_t notify_msg;
-                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
-                        j != i->buffers.end(); j++) {
-                    QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
-                    uint32_t streamTypeMask = channel->getStreamTypeMask();
-                    if (streamTypeMask & cam_frame_drop.stream_type_mask) {
-                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
-                        ALOGV("%s: Start of reporting error frame#=%d, streamMask=%d",
-                               __func__, i->frame_number, streamTypeMask);
-                        notify_msg.type = CAMERA3_MSG_ERROR;
-                        notify_msg.message.error.frame_number = i->frame_number;
-                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
-                        notify_msg.message.error.error_stream = j->stream;
-                        mCallbackOps->notify(mCallbackOps, &notify_msg);
-                        ALOGV("%s: End of reporting error frame#=%d, streamMask=%d",
-                               __func__, i->frame_number, streamTypeMask);
-                        PendingFrameDropInfo PendingFrameDrop;
-                        PendingFrameDrop.frame_number=i->frame_number;
-                        PendingFrameDrop.stream_type_mask = cam_frame_drop.stream_type_mask;
-                        // Add the Frame drop info to mPendingFrameDropList
-                        mPendingFrameDropList.push_back(PendingFrameDrop);
-                    }
-                }
-            }
-
-            // Send empty metadata with already filled buffers for dropped metadata
-            // and send valid metadata with already filled buffers for current metadata
-            if (i->frame_number < frame_number) {
-                CameraMetadata dummyMetadata;
-                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
-                        &current_capture_time, 1);
-                dummyMetadata.update(ANDROID_REQUEST_ID,
-                        &(i->request_id), 1);
-                result.result = dummyMetadata.release();
-            } else {
-                result.result = translateCbMetadataToResultMetadata(metadata,
-                        current_capture_time, i->request_id, i->blob_request,
-                        &(i->input_jpeg_settings));
-                if (mIsZslMode) {
-                   int found_metadata = 0;
-                   //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
-                   for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
-                        j != i->buffers.end(); j++) {
-                      if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
-                         //check if corresp. zsl already exists in the stored metadata list
-                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
-                               m != mStoredMetadataList.begin(); m++) {
-                            if (m->frame_number == frame_number) {
-                               m->meta_buf = metadata_buf;
-                               found_metadata = 1;
-                               break;
-                            }
-                         }
-                         if (!found_metadata) {
-                            MetadataBufferInfo store_meta_info;
-                            store_meta_info.meta_buf = metadata_buf;
-                            store_meta_info.frame_number = frame_number;
-                            mStoredMetadataList.push_back(store_meta_info);
-                            found_metadata = 1;
-                         }
-                      }
-                   }
-                   if (!found_metadata) {
-                       if (!i->input_buffer_present && i->blob_request) {
-                          //livesnapshot or fallback non-zsl snapshot case
-                          for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
-                                j != i->buffers.end(); j++){
-                              if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
-                                  j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
-                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
-                                 break;
-                              }
-                         }
-                       } else {
-                            //return the metadata immediately
-                            mMetadataChannel->bufDone(metadata_buf);
-                            free(metadata_buf);
-                       }
-                   }
-               } else if (!mIsZslMode && i->blob_request) {
-                   //If it is a blob request then send the metadata to the picture channel
-                   mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
-               } else {
-                   // Return metadata buffer
-                   mMetadataChannel->bufDone(metadata_buf);
-                   free(metadata_buf);
-               }
-
-            }
-            if (!result.result) {
-                ALOGE("%s: metadata is NULL", __func__);
-            }
-            result.frame_number = i->frame_number;
-            result.num_output_buffers = 0;
-            result.output_buffers = NULL;
-            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
-                    j != i->buffers.end(); j++) {
-                if (j->buffer) {
-                    result.num_output_buffers++;
-                }
-            }
-
-            if (result.num_output_buffers > 0) {
-                camera3_stream_buffer_t *result_buffers =
-                    new camera3_stream_buffer_t[result.num_output_buffers];
-                if (!result_buffers) {
-                    ALOGE("%s: Fatal error: out of memory", __func__);
-                }
-                size_t result_buffers_idx = 0;
-                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
-                     j != i->buffers.end(); j++) {
-                     if (j->buffer) {
-                         for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
-                              m != mPendingFrameDropList.end(); m++) {
-                              QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
-                              uint32_t streamTypeMask = channel->getStreamTypeMask();
-                              if((m->stream_type_mask & streamTypeMask) &&
-                                  (m->frame_number==frame_number)) {
-                                  j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
-                                  ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamTypeMask=%d",
-                                        __func__, frame_number, streamTypeMask);
-                                  m = mPendingFrameDropList.erase(m);
-                                  break;
-                              }
-                         }
-                         result_buffers[result_buffers_idx++] = *(j->buffer);
-                         free(j->buffer);
-                         j->buffer = NULL;
-                         mPendingBuffersMap.editValueFor(j->stream)--;
-                    }
-                }
-                result.output_buffers = result_buffers;
-
-                mCallbackOps->process_capture_result(mCallbackOps, &result);
-                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
-                        __func__, result.frame_number, current_capture_time);
-                free_camera_metadata((camera_metadata_t *)result.result);
-                delete[] result_buffers;
-            } else {
-                mCallbackOps->process_capture_result(mCallbackOps, &result);
-                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
-                        __func__, result.frame_number, current_capture_time);
-                free_camera_metadata((camera_metadata_t *)result.result);
-            }
-            // erase the element from the list
-            i = mPendingRequestsList.erase(i);
-        }
-
-
-done_metadata:
-        bool max_buffers_dequeued = false;
-        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
-            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
-            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
-            if (queued_buffers == stream->max_buffers) {
-                max_buffers_dequeued = true;
-                break;
-            }
-        }
-        if (!max_buffers_dequeued && !pending_requests) {
-            // Unblock process_capture_request
-            mPendingRequest = 0;
-            pthread_cond_signal(&mRequestCond);
-        }
-    } else {
-        // If the frame number doesn't exist in the pending request list,
-        // directly send the buffer to the frameworks, and update pending buffers map
-        // Otherwise, book-keep the buffer.
-        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
-        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
-            i++;
-        }
-        if (i == mPendingRequestsList.end()) {
-            // Verify all pending requests frame_numbers are greater
-            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
-                    j != mPendingRequestsList.end(); j++) {
-                if (j->frame_number < frame_number) {
-                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
-                            __func__, j->frame_number, frame_number);
-                }
-            }
-            camera3_capture_result_t result;
-            result.result = NULL;
-            result.frame_number = frame_number;
-            result.num_output_buffers = 1;
-            for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
-                  m != mPendingFrameDropList.end(); m++) {
-                QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
-                uint32_t streamTypeMask = channel->getStreamTypeMask();
-                if((m->stream_type_mask & streamTypeMask) &&
-                    (m->frame_number==frame_number) ) {
-                    buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
-                    ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamTypeMask=%d",
-                            __func__, frame_number, streamTypeMask);
-                    m = mPendingFrameDropList.erase(m);
-                    break;
-                }
-            }
-            result.output_buffers = buffer;
-            ALOGV("%s: result frame_number = %d, buffer = %p",
-                    __func__, frame_number, buffer);
-            mPendingBuffersMap.editValueFor(buffer->stream)--;
-            if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
-                int found = 0;
-                for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
-                      k != mStoredMetadataList.end(); k++) {
-                    if (k->frame_number == frame_number) {
-                        k->zsl_buf_hdl = buffer->buffer;
-                        found = 1;
-                        break;
-                    }
-                }
-                if (!found) {
-                   MetadataBufferInfo meta_info;
-                   meta_info.frame_number = frame_number;
-                   meta_info.zsl_buf_hdl = buffer->buffer;
-                   mStoredMetadataList.push_back(meta_info);
-                }
-            }
-            mCallbackOps->process_capture_result(mCallbackOps, &result);
-        } else {
-            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
-                    j != i->buffers.end(); j++) {
-                if (j->stream == buffer->stream) {
-                    if (j->buffer != NULL) {
-                        ALOGE("%s: Error: buffer is already set", __func__);
-                    } else {
-                        j->buffer = (camera3_stream_buffer_t *)malloc(
-                                sizeof(camera3_stream_buffer_t));
-                        *(j->buffer) = *buffer;
-                        ALOGV("%s: cache buffer %p at result frame_number %d",
-                                __func__, buffer, frame_number);
-                    }
-                }
-            }
-        }
-    }
     pthread_mutex_unlock(&mMutex);
     return;
 }
diff --git a/QCamera2/HAL3/QCamera3HWI.h b/QCamera2/HAL3/QCamera3HWI.h
index 5ffc8d2..27717db 100755
--- a/QCamera2/HAL3/QCamera3HWI.h
+++ b/QCamera2/HAL3/QCamera3HWI.h
@@ -163,6 +163,11 @@
 
     void deriveMinFrameDuration();
     int64_t getMinFrameDuration(const camera3_capture_request_t *request);
+
+    void handleMetadataWithLock(mm_camera_super_buf_t *metadata_buf);
+    void handleBufferWithLock(camera3_stream_buffer_t *buffer,
+        uint32_t frame_number);
+    void unblockRequestIfNecessary();
 public:
 
     bool needOnlineRotation();
@@ -170,6 +175,8 @@
     int getJpegQuality();
     int calcMaxJpegSize();
     QCamera3Exif *getExifData();
+public:
+    static int kMaxInFlight;
 private:
     camera3_device_t   mCameraDevice;
     uint8_t            mCameraId;
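
Note on the new in-flight limit: kMaxInFlight, the do/while loop waiting on mRequestCond, and unblockRequestIfNecessary() together form a counting throttle: the request-submission path bumps mPendingRequest and waits until fewer than kMaxInFlight requests remain outstanding (and no stream has all of its max_buffers dequeued), while the metadata path decrements the count and signals. The following is a minimal standalone sketch of that pattern only; the names (RequestThrottle, submit, complete) are illustrative and are not HAL symbols, and it uses a plain while-guard rather than the patch's do/while.

#include <pthread.h>
#include <stdint.h>

// Illustrative counting throttle: at most 'limit' items in flight at once.
class RequestThrottle {
public:
    explicit RequestThrottle(uint32_t limit) : mLimit(limit), mInFlight(0) {
        pthread_mutex_init(&mMutex, NULL);
        pthread_cond_init(&mCond, NULL);
    }
    ~RequestThrottle() {
        pthread_cond_destroy(&mCond);
        pthread_mutex_destroy(&mMutex);
    }
    // Request path: block while the limit is reached, then claim a slot.
    void submit() {
        pthread_mutex_lock(&mMutex);
        while (mInFlight >= mLimit)
            pthread_cond_wait(&mCond, &mMutex);
        mInFlight++;
        pthread_mutex_unlock(&mMutex);
    }
    // Result path: release a slot and wake one blocked submitter.
    void complete() {
        pthread_mutex_lock(&mMutex);
        if (mInFlight > 0)
            mInFlight--;
        pthread_cond_signal(&mCond);
        pthread_mutex_unlock(&mMutex);
    }
private:
    const uint32_t mLimit;
    uint32_t       mInFlight;
    pthread_mutex_t mMutex;
    pthread_cond_t  mCond;
};

A submitter thread would call submit() before handing a capture request to the backend and the result callback would call complete(); constructing the throttle with limit = kMaxInFlight bounds the number of outstanding requests in the same way the patch does.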