Apply EVS interface changes

This commit applies the following EVS interface changes:
- Rename EvsEvent to EvsEventDesc.
- doneWithFrame_1_1() now takes a vector of buffers to be returned.
- deliverFrame_1_1() now delivers a vector of buffers.
- getIntParameter() and setIntParameter() now return a vector of
  integer values.
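
For reference, a minimal sketch of the new client-side calling convention.
The pCamera proxy and bufDesc variable are illustrative placeholders, and
BufferDesc_1_1 stands for the V1_1::BufferDesc alias used in the sources
below:

    // A single frame is now returned by wrapping it in a hidl_vec.
    hidl_vec<BufferDesc_1_1> frames;
    frames.resize(1);
    frames[0] = bufDesc;
    pCamera->doneWithFrame_1_1(frames);

    // Parameter reads and writes now report one value per physical device;
    // a camera with a single physical device returns a single element.
    int32_t brightness = 0;
    pCamera->getIntParameter(CameraParam::BRIGHTNESS,
                             [&brightness](auto result, const auto& values) {
                                 if (result == EvsResult::OK) {
                                     brightness = values[0];
                                 }
                             });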

Bug: 142275664
Test: VtsHalEvsV1_1TargetTest
Change-Id: Id99f04770953d07d1360f823af079ecf3c96f31d
Signed-off-by: Changyeon Jo <changyeon@google.com>
diff --git a/evs/apps/default/StreamHandler.cpp b/evs/apps/default/StreamHandler.cpp
index 9a1b4f0..125d76a 100644
--- a/evs/apps/default/StreamHandler.cpp
+++ b/evs/apps/default/StreamHandler.cpp
@@ -123,7 +123,10 @@
     }
 
     // Send the buffer back to the underlying camera
-    mCamera->doneWithFrame_1_1(mBuffers[mHeldBuffer]);
+    hidl_vec<BufferDesc_1_1> frames;
+    frames.resize(1);
+    frames[0] = mBuffers[mHeldBuffer];
+    mCamera->doneWithFrame_1_1(frames);
 
     // Clear the held position
     mHeldBuffer = -1;
@@ -138,12 +141,12 @@
 }
 
 
-Return<void> StreamHandler::deliverFrame_1_1(const BufferDesc_1_1& bufDesc) {
-    ALOGD("Received a frame event from the camera (%p)",
-          bufDesc.buffer.nativeHandle.getNativeHandle());
+Return<void> StreamHandler::deliverFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffers) {
+    ALOGD("Received frames from the camera");
 
     // Take the lock to protect our frame slots and running state variable
     std::unique_lock <std::mutex> lock(mLock);
+    BufferDesc_1_1 bufDesc = buffers[0];
     if (bufDesc.buffer.nativeHandle.getNativeHandle() == nullptr) {
         // Signal that the last frame has been received and the stream is stopped
         ALOGW("Invalid null frame (id: 0x%X) is ignored", bufDesc.bufferId);
@@ -151,7 +154,10 @@
         // Do we already have a "ready" frame?
         if (mReadyBuffer >= 0) {
             // Send the previously saved buffer back to the camera unused
-            mCamera->doneWithFrame_1_1(mBuffers[mReadyBuffer]);
+            hidl_vec<BufferDesc_1_1> frames;
+            frames.resize(1);
+            frames[0] = mBuffers[mReadyBuffer];
+            mCamera->doneWithFrame_1_1(frames);
 
             // We'll reuse the same ready buffer index
         } else if (mHeldBuffer >= 0) {
@@ -174,7 +180,7 @@
 }
 
 
-Return<void> StreamHandler::notify(const EvsEvent& event) {
+Return<void> StreamHandler::notify(const EvsEventDesc& event) {
     switch(event.aType) {
         case EvsEventType::STREAM_STOPPED:
         {
diff --git a/evs/apps/default/StreamHandler.h b/evs/apps/default/StreamHandler.h
index 9841385..ee23227 100644
--- a/evs/apps/default/StreamHandler.h
+++ b/evs/apps/default/StreamHandler.h
@@ -66,8 +66,8 @@
     Return<void> deliverFrame(const BufferDesc_1_0& buffer)  override;
 
     // Implementation for ::android::hardware::automotive::evs::V1_1::IEvsCameraStream
-    Return<void> deliverFrame_1_1(const BufferDesc_1_1& buffer)  override;
-    Return<void> notify(const EvsEvent& event) override;
+    Return<void> deliverFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffer)  override;
+    Return<void> notify(const EvsEventDesc& event) override;
 
     // Values initialized as startup
     android::sp <IEvsCamera>    mCamera;
diff --git a/evs/manager/1.1/HalCamera.cpp b/evs/manager/1.1/HalCamera.cpp
index 72f1ea2..25712ba 100644
--- a/evs/manager/1.1/HalCamera.cpp
+++ b/evs/manager/1.1/HalCamera.cpp
@@ -195,7 +195,10 @@
         mFrames[i].refCount--;
         if (mFrames[i].refCount <= 0) {
             // Since all our clients are done with this buffer, return it to the device layer
-            mHwCamera->doneWithFrame_1_1(buffer);
+            hardware::hidl_vec<BufferDesc_1_1> returnedBuffers;
+            returnedBuffers.resize(1);
+            returnedBuffers[0] = buffer;
+            mHwCamera->doneWithFrame_1_1(returnedBuffers);
         }
     }
 
@@ -217,13 +220,13 @@
 
 
 // Methods from ::android::hardware::automotive::evs::V1_1::IEvsCameraStream follow.
-Return<void> HalCamera::deliverFrame_1_1(const BufferDesc_1_1& buffer) {
+Return<void> HalCamera::deliverFrame_1_1(const hardware::hidl_vec<BufferDesc_1_1>& buffer) {
     ALOGV("Received a frame");
     unsigned frameDeliveries = 0;
     for (auto&& client : mClients) {
         sp<VirtualCamera> vCam = client.promote();
         if (vCam != nullptr) {
-            if (vCam->deliverFrame(buffer)) {
+            if (vCam->deliverFrame(buffer[0])) {
                 ++frameDeliveries;
             }
         }
@@ -244,9 +247,9 @@
         }
 
         if (i == mFrames.size()) {
-            mFrames.emplace_back(buffer.bufferId);
+            mFrames.emplace_back(buffer[0].bufferId);
         } else {
-            mFrames[i].frameId = buffer.bufferId;
+            mFrames[i].frameId = buffer[0].bufferId;
         }
         mFrames[i].refCount = frameDeliveries;
     }
@@ -255,7 +258,7 @@
 }
 
 
-Return<void> HalCamera::notify(const EvsEvent& event) {
+Return<void> HalCamera::notify(const EvsEventDesc& event) {
     ALOGD("Received an event id: %u", event.aType);
     if(event.aType == EvsEventType::STREAM_STOPPED) {
         // This event happens only when there is no more active client.
@@ -305,7 +308,7 @@
                 virtualCamera.get(), prevMaster.get());
 
             /* Notify a previous master client the loss of a master role */
-            EvsEvent event;
+            EvsEventDesc event;
             event.aType = EvsEventType::MASTER_RELEASED;
             if (!prevMaster->notify(event)) {
                 ALOGE("Fail to deliver a master role lost notification");
@@ -326,7 +329,7 @@
         mMaster = nullptr;
 
         /* Notify other clients that a master role becomes available. */
-        EvsEvent event;
+        EvsEventDesc event;
         event.aType = EvsEventType::MASTER_RELEASED;
         auto cbResult = this->notify(event);
         if (!cbResult.isOk()) {
@@ -345,12 +348,12 @@
         mHwCamera->setIntParameter(id, value,
                                    [&result, &value](auto status, auto readValue) {
                                        result = status;
-                                       value = readValue;
+                                       value = readValue[0];
                                    });
 
         if (result == EvsResult::OK) {
             /* Notify a parameter change */
-            EvsEvent event;
+            EvsEventDesc event;
             event.aType = EvsEventType::PARAMETER_CHANGED;
             event.payload[0] = static_cast<uint32_t>(id);
             event.payload[1] = static_cast<uint32_t>(value);
@@ -375,7 +378,7 @@
     mHwCamera->getIntParameter(id, [&result, &value](auto status, auto readValue) {
                                        result = status;
                                        if (result == EvsResult::OK) {
-                                           value = readValue;
+                                           value = readValue[0];
                                        }
     });
 
diff --git a/evs/manager/1.1/HalCamera.h b/evs/manager/1.1/HalCamera.h
index c85d0f1..ab35a1f 100644
--- a/evs/manager/1.1/HalCamera.h
+++ b/evs/manager/1.1/HalCamera.h
@@ -79,8 +79,8 @@
     Return<void> deliverFrame(const BufferDesc_1_0& buffer) override;
 
     // Methods from ::android::hardware::automotive::evs::V1_1::IEvsCameraStream follow.
-    Return<void> deliverFrame_1_1(const BufferDesc_1_1& buffer) override;
-    Return<void> notify(const EvsEvent& event) override;
+    Return<void> deliverFrame_1_1(const hardware::hidl_vec<BufferDesc_1_1>& buffer) override;
+    Return<void> notify(const EvsEventDesc& event) override;
 
 private:
     sp<IEvsCamera_1_1>              mHwCamera;
diff --git a/evs/manager/1.1/VirtualCamera.cpp b/evs/manager/1.1/VirtualCamera.cpp
index 562b5d3..b6c4513 100644
--- a/evs/manager/1.1/VirtualCamera.cpp
+++ b/evs/manager/1.1/VirtualCamera.cpp
@@ -87,7 +87,8 @@
 
         if (mStream_1_1 != nullptr) {
             // Report a frame drop to v1.1 client.
-            EvsEvent event;
+            EvsEventDesc event;
+            event.deviceId = bufDesc.deviceId;
             event.aType = EvsEventType::FRAME_DROPPED;
             auto result = mStream_1_1->notify(event);
             if (!result.isOk()) {
@@ -100,24 +101,31 @@
         // Keep a record of this frame so we can clean up if we have to in case of client death
         mFramesHeld.emplace_back(bufDesc);
 
-        if (mStream_1_1 != nullptr) {
-            // Pass this buffer through to our client
-            mStream_1_1->deliverFrame_1_1(bufDesc);
-        } else {
-            // Forward a frame to v1.0 client
-            BufferDesc_1_0 frame_1_0 = {};
-            const AHardwareBuffer_Desc* pDesc =
-                reinterpret_cast<const AHardwareBuffer_Desc *>(&bufDesc.buffer.description);
-            frame_1_0.width     = pDesc->width;
-            frame_1_0.height    = pDesc->height;
-            frame_1_0.format    = pDesc->format;
-            frame_1_0.usage     = pDesc->usage;
-            frame_1_0.stride    = pDesc->stride;
-            frame_1_0.memHandle = bufDesc.buffer.nativeHandle;
-            frame_1_0.pixelSize = bufDesc.pixelSize;
-            frame_1_0.bufferId  = bufDesc.bufferId;
+        // Forward frames if new frames from all physical devices have arrived.
+        {
+            // TODO: Batch frames from all member devices; for now, forward only a single frame.
+            if (mStream_1_1 != nullptr) {
+                // Pass this buffer through to our client
+                hardware::hidl_vec<BufferDesc_1_1> frames;
+                frames.resize(1);
+                frames[0] = bufDesc;
+                mStream_1_1->deliverFrame_1_1(frames);
+            } else {
+                // Forward a frame to v1.0 client
+                BufferDesc_1_0 frame_1_0 = {};
+                const AHardwareBuffer_Desc* pDesc =
+                    reinterpret_cast<const AHardwareBuffer_Desc *>(&bufDesc.buffer.description);
+                frame_1_0.width     = pDesc->width;
+                frame_1_0.height    = pDesc->height;
+                frame_1_0.format    = pDesc->format;
+                frame_1_0.usage     = pDesc->usage;
+                frame_1_0.stride    = pDesc->stride;
+                frame_1_0.memHandle = bufDesc.buffer.nativeHandle;
+                frame_1_0.pixelSize = bufDesc.pixelSize;
+                frame_1_0.bufferId  = bufDesc.bufferId;
 
-            mStream->deliverFrame(frame_1_0);
+                mStream->deliverFrame(frame_1_0);
+            }
         }
 
         return true;
@@ -125,7 +133,7 @@
 }
 
 
-bool VirtualCamera::notify(const EvsEvent& event) {
+bool VirtualCamera::notify(const EvsEventDesc& event) {
     switch(event.aType) {
         case EvsEventType::STREAM_STOPPED:
             // Warn if we got an unexpected stream termination
@@ -272,7 +280,7 @@
         // Deliver an empty frame to close out the frame stream
         if (mStream_1_1 != nullptr) {
             // v1.1 client waits for a stream stopped event
-            EvsEvent event;
+            EvsEventDesc event;
             event.aType = EvsEventType::STREAM_STOPPED;
             auto result = mStream_1_1->notify(event);
             if (!result.isOk()) {
@@ -329,28 +337,33 @@
 }
 
 
-Return<EvsResult> VirtualCamera::doneWithFrame_1_1(const BufferDesc_1_1& bufDesc_1_1) {
-    if (bufDesc_1_1.buffer.nativeHandle == nullptr) {
-        ALOGE("ignoring doneWithFrame called with invalid handle");
-    } else {
-        // Find this buffer in our "held" list
-        auto it = mFramesHeld.begin();
-        while (it != mFramesHeld.end()) {
-            if (it->bufferId == bufDesc_1_1.bufferId) {
-                // found it!
-                break;
-            }
-            ++it;
-        }
-        if (it == mFramesHeld.end()) {
-            // We should always find the frame in our "held" list
-            ALOGE("Ignoring doneWithFrame called with unrecognized frameID %d", bufDesc_1_1.bufferId);
-        } else {
-            // Take this frame out of our "held" list
-            mFramesHeld.erase(it);
+Return<EvsResult> VirtualCamera::doneWithFrame_1_1(
+    const hardware::hidl_vec<BufferDesc_1_1>& buffers) {
 
-            // Tell our parent that we're done with this buffer
-            mHalCamera->doneWithFrame(bufDesc_1_1);
+    for (auto&& buffer : buffers) {
+        if (buffer.buffer.nativeHandle == nullptr) {
+            ALOGW("ignoring doneWithFrame called with invalid handle");
+        } else {
+            // Find this buffer in our "held" list
+            auto it = mFramesHeld.begin();
+            while (it != mFramesHeld.end()) {
+                if (it->bufferId == buffer.bufferId) {
+                    // found it!
+                    break;
+                }
+                ++it;
+            }
+            if (it == mFramesHeld.end()) {
+                // We should always find the frame in our "held" list
+                ALOGE("Ignoring doneWithFrame called with unrecognized frameID %d",
+                      buffer.bufferId);
+            } else {
+                // Take this frame out of our "held" list
+                mFramesHeld.erase(it);
+
+                // Tell our parent that we're done with this buffer
+                mHalCamera->doneWithFrame(buffer);
+            }
         }
     }
 
@@ -421,7 +434,11 @@
                                             int32_t value,
                                             setIntParameter_cb _hidl_cb) {
     EvsResult status = mHalCamera->setParameter(this, id, value);
-    _hidl_cb(status, value);
+
+    hardware::hidl_vec<int32_t> values;
+    values.resize(1);
+    values[0] = value;
+    _hidl_cb(status, values);
 
     return Void();
 }
@@ -431,7 +448,11 @@
                                             getIntParameter_cb _hidl_cb) {
     int32_t value;
     EvsResult status = mHalCamera->getParameter(id, value);
-    _hidl_cb(status, value);
+
+    hardware::hidl_vec<int32_t> values;
+    values.resize(1);
+    values[0] = value;
+    _hidl_cb(status, values);
 
     return Void();
 }
diff --git a/evs/manager/1.1/VirtualCamera.h b/evs/manager/1.1/VirtualCamera.h
index b51920c..42cadcb 100644
--- a/evs/manager/1.1/VirtualCamera.h
+++ b/evs/manager/1.1/VirtualCamera.h
@@ -64,7 +64,7 @@
     bool              isStreaming()       { return mStreamState == RUNNING; }
 
     // Proxy to receive frames and forward them to the client's stream
-    bool              notify(const EvsEvent& event);
+    bool              notify(const EvsEventDesc& event);
     bool              deliverFrame(const BufferDesc& bufDesc);
 
     // Methods from ::android::hardware::automotive::evs::V1_0::IEvsCamera follow.
@@ -78,7 +78,7 @@
 
     // Methods from ::android::hardware::automotive::evs::V1_1::IEvsCamera follow.
     Return<void>      getCameraInfo_1_1(getCameraInfo_1_1_cb _hidl_cb)  override;
-    Return<EvsResult> doneWithFrame_1_1(const BufferDesc_1_1& buffer) override;
+    Return<EvsResult> doneWithFrame_1_1(const hardware::hidl_vec<BufferDesc_1_1>& buffer) override;
     Return<EvsResult> pauseVideoStream() override { return EvsResult::UNDERLYING_SERVICE_ERROR; }
     Return<EvsResult> resumeVideoStream() override { return EvsResult::UNDERLYING_SERVICE_ERROR; }
     Return<EvsResult> setMaster() override;
diff --git a/evs/sampleDriver/ConfigManager.cpp b/evs/sampleDriver/ConfigManager.cpp
index 02b6ca8..d93c93e 100644
--- a/evs/sampleDriver/ConfigManager.cpp
+++ b/evs/sampleDriver/ConfigManager.cpp
@@ -63,55 +63,32 @@
     while (curElem != nullptr) {
         if (!strcmp(curElem->Name(), "group")) {
             /* camera group identifier */
-            const char *group_id = curElem->FindAttribute("group_id")->Value();
+            const char *id = curElem->FindAttribute("id")->Value();
 
-            /* create CameraGroup */
-            unique_ptr<ConfigManager::CameraGroup> aCameraGroup(new ConfigManager::CameraGroup());
+            /* create a camera group to be filled */
+            CameraGroupInfo *aCamera = new CameraGroupInfo();
 
-            /* add a camera device to its group */
-            addCameraDevices(curElem->FindAttribute("device_id")->Value(), aCameraGroup);
-
-            /* a list of camera stream configurations */
-            const XMLElement *childElem =
-                curElem->FirstChildElement("caps")->FirstChildElement("stream");
-            while (childElem != nullptr) {
-                /* read 5 attributes */
-                const XMLAttribute *idAttr     = childElem->FindAttribute("id");
-                const XMLAttribute *widthAttr  = childElem->FindAttribute("width");
-                const XMLAttribute *heightAttr = childElem->FindAttribute("height");
-                const XMLAttribute *fmtAttr    = childElem->FindAttribute("format");
-                const XMLAttribute *fpsAttr    = childElem->FindAttribute("framerate");
-
-                const int32_t id = stoi(idAttr->Value());
-                int32_t framerate = 0;
-                if (fpsAttr != nullptr) {
-                    framerate = stoi(fpsAttr->Value());
-                }
-
-                int32_t pixFormat;
-                if (ConfigManagerUtil::convertToPixelFormat(fmtAttr->Value(),
-                                                            pixFormat)) {
-                    RawStreamConfiguration cfg = {
-                        id,
-                        stoi(widthAttr->Value()),
-                        stoi(heightAttr->Value()),
-                        pixFormat,
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
-                        framerate
-                    };
-                    aCameraGroup->streamConfigurations.insert_or_assign(id, cfg);
-                }
-
-                childElem = childElem->NextSiblingElement("stream");
+            /* read camera device information */
+            if (!readCameraDeviceInfo(aCamera, curElem)) {
+                ALOGW("Failed to read a camera information of %s", id);
+                delete aCamera;
+                continue;
             }
 
             /* camera group synchronization */
             const char *sync = curElem->FindAttribute("synchronized")->Value();
-            aCameraGroup->synchronized =
-                static_cast<bool>(strcmp(sync, "false"));
+            if (!strcmp(sync, "CALIBRATED")) {
+                aCamera->synchronized =
+                    ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED;
+            } else if (!strcmp(sync, "APPROXIMATE")) {
+                aCamera->synchronized =
+                    ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_APPROXIMATE;
+            } else {
+                aCamera->synchronized = 0; // Not synchronized
+            }
 
             /* add a group to hash map */
-            mCameraGroups.insert_or_assign(group_id, std::move(aCameraGroup));
+            mCameraGroups.insert_or_assign(id, unique_ptr<CameraGroupInfo>(aCamera));
         } else if (!strcmp(curElem->Name(), "device")) {
             /* camera unique identifier */
             const char *id = curElem->FindAttribute("id")->Value();
@@ -119,8 +96,18 @@
             /* camera mount location */
             const char *pos = curElem->FindAttribute("position")->Value();
 
+            /* create a camera device to be filled */
+            CameraInfo *aCamera = new CameraInfo();
+
+            /* read camera device information */
+            if (!readCameraDeviceInfo(aCamera, curElem)) {
+                ALOGW("Failed to read a camera information of %s", id);
+                delete aCamera;
+                continue;
+            }
+
             /* store read camera module information */
-            mCameraInfo.insert_or_assign(id, readCameraDeviceInfo(curElem));
+            mCameraInfo.insert_or_assign(id, unique_ptr<CameraInfo>(aCamera));
 
             /* assign a camera device to a position group */
             mCameraPosition[pos].emplace(id);
@@ -134,15 +121,13 @@
 }
 
 
-unique_ptr<ConfigManager::CameraInfo>
-ConfigManager::readCameraDeviceInfo(const XMLElement *aDeviceElem) {
-    if (aDeviceElem == nullptr) {
-        return nullptr;
+bool
+ConfigManager::readCameraDeviceInfo(CameraInfo *aCamera,
+                                    const XMLElement *aDeviceElem) {
+    if (aCamera == nullptr || aDeviceElem == nullptr) {
+        return false;
     }
 
-    /* create a CameraInfo to be filled */
-    unique_ptr<ConfigManager::CameraInfo> aCamera(new ConfigManager::CameraInfo());
-
     /* size information to allocate camera_metadata_t */
     size_t totalEntries = 0;
     size_t totalDataSize = 0;
@@ -166,14 +151,15 @@
               "allocated memory was not large enough");
     }
 
-    return aCamera;
+    return true;
 }
 
 
-size_t ConfigManager::readCameraCapabilities(const XMLElement * const aCapElem,
-                                             unique_ptr<ConfigManager::CameraInfo> &aCamera,
-                                             size_t &dataSize) {
-    if (aCapElem == nullptr) {
+size_t
+ConfigManager::readCameraCapabilities(const XMLElement * const aCapElem,
+                                      CameraInfo *aCamera,
+                                      size_t &dataSize) {
+    if (aCapElem == nullptr || aCamera == nullptr) {
         return 0;
     }
 
@@ -253,10 +239,11 @@
 }
 
 
-size_t ConfigManager::readCameraMetadata(const XMLElement * const aParamElem,
-                                       unique_ptr<ConfigManager::CameraInfo> &aCamera,
-                                       size_t &dataSize) {
-    if (aParamElem == nullptr) {
+size_t
+ConfigManager::readCameraMetadata(const XMLElement * const aParamElem,
+                                  CameraInfo *aCamera,
+                                  size_t &dataSize) {
+    if (aParamElem == nullptr || aCamera == nullptr) {
         return 0;
     }
 
@@ -291,6 +278,54 @@
                     break;
                 }
 
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES: {
+                    camera_metadata_enum_android_request_available_capabilities_t *data =
+                        new camera_metadata_enum_android_request_available_capabilities_t[1];
+                    if (ConfigManagerUtil::convertToCameraCapability(
+                           curElem->FindAttribute("synchronized")->Value(),
+                           *data)
+                       ) {
+                        aCamera->cameraMetadata.insert_or_assign(
+                            tag, make_pair(make_unique<void *>(data), 1)
+                        );
+
+                        ++numEntries;
+                        dataSize += calculate_camera_metadata_entry_data_size(
+                                        get_camera_metadata_tag_type(tag), 1
+                                    );
+                    }
+                    break;
+                }
+
+                case ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS: {
+                    /* a comma-separated list of physical camera devices */
+                    size_t len = strlen(curElem->FindAttribute("value")->Value());
+                    char *data = new char[len + 1];
+                    memcpy(data,
+                           curElem->FindAttribute("value")->Value(),
+                           (len + 1) * sizeof(char));  // copy the terminating null as well
+
+                    /* replace commas with null char */
+                    char *p = data;
+                    while (*p != '\0') {
+                        if (*p == ',') {
+                            *p = '\0';
+                        }
+                        ++p;
+                    }
+
+                    aCamera->cameraMetadata.insert_or_assign(
+                        tag, make_pair(make_unique<void *>(data), len)
+                    );
+
+                    ++numEntries;
+                    dataSize += calculate_camera_metadata_entry_data_size(
+                                    get_camera_metadata_tag_type(tag), len
+                                );
+                    break;
+                }
+
+
                 /* TODO(b/140416878): add vendor-defined/custom tag support */
 
                 default:
@@ -307,10 +342,11 @@
 }
 
 
-bool ConfigManager::constructCameraMetadata(unique_ptr<CameraInfo> &aCamera,
-                                            const size_t totalEntries,
-                                            const size_t totalDataSize) {
-    if (!aCamera->allocate(totalEntries, totalDataSize)) {
+bool
+ConfigManager::constructCameraMetadata(CameraInfo *aCamera,
+                                       const size_t totalEntries,
+                                       const size_t totalDataSize) {
+    if (aCamera == nullptr || !aCamera->allocate(totalEntries, totalDataSize)) {
         ALOGE("Failed to allocate memory for camera metadata");
         return false;
     }
@@ -502,18 +538,137 @@
         return false;
     }
 
+    unique_lock<mutex> lock(mConfigLock);
+    mIsReady = false;
+
     /* read configuration data into the internal buffer */
     srcFile.read(mBuffer, sizeof(mBuffer));
     ALOGV("%s: %ld bytes are read", __FUNCTION__, (long)srcFile.gcount());
     char *p = mBuffer;
+    size_t sz = 0;
+
+    /* read number of camera group information entries */
+    size_t ngrps = *(reinterpret_cast<size_t *>(p)); p += sizeof(size_t);
+
+    /* read each camera group information entry */
+    for (auto cidx = 0; cidx < ngrps; ++cidx) {
+        /* read camera group identifier */
+        string cameraId = *(reinterpret_cast<string *>(p)); p += sizeof(string);
+
+        /* size of camera_metadata_t */
+        size_t num_entry = *(reinterpret_cast<size_t *>(p)); p += sizeof(size_t);
+        size_t num_data  = *(reinterpret_cast<size_t *>(p)); p += sizeof(size_t);
+
+        /* create a CameraGroupInfo and add it to the hash map */
+        unique_ptr<ConfigManager::CameraGroupInfo> aCamera(new ConfigManager::CameraGroupInfo());
+        if (aCamera == nullptr ||
+            !aCamera->allocate(num_entry, num_data))  {
+            ALOGE("Failed to create new CameraGroupInfo object");
+            mCameraInfo.clear();
+            return false;
+        }
+
+        /* controls */
+        typedef struct {
+            CameraParam cid;
+            int32_t min;
+            int32_t max;
+            int32_t step;
+        } CameraCtrl;
+        sz = *(reinterpret_cast<size_t *>(p)); p += sizeof(size_t);
+        CameraCtrl *ptr = reinterpret_cast<CameraCtrl *>(p);
+        for (auto idx = 0; idx < sz; ++idx) {
+            CameraCtrl temp = *ptr++;
+            aCamera->controls.emplace(temp.cid,
+                                      make_tuple(temp.min, temp.max, temp.step));
+        }
+        p = reinterpret_cast<char *>(ptr);
+
+        /* stream configurations */
+        sz = *(reinterpret_cast<size_t *>(p)); p += sizeof(size_t);
+        int32_t *i32_ptr = reinterpret_cast<int32_t *>(p);
+        for (auto idx = 0; idx < sz; ++idx) {
+            const int32_t id = *i32_ptr++;
+
+            std::array<int32_t, kStreamCfgSz> temp;
+            for (auto i = 0; i < kStreamCfgSz; ++i) {
+                temp[i] = *i32_ptr++;
+            }
+            aCamera->streamConfigurations.insert_or_assign(id, temp);
+        }
+        p = reinterpret_cast<char *>(i32_ptr);
+
+        /* synchronization */
+        aCamera->synchronized =
+            *(reinterpret_cast<int32_t *>(p)); p += sizeof(int32_t);
+
+        for (auto idx = 0; idx < num_entry; ++idx) {
+            /* Read camera metadata entries */
+            camera_metadata_tag_t tag =
+                *reinterpret_cast<camera_metadata_tag_t *>(p);
+            p += sizeof(camera_metadata_tag_t);
+            size_t count = *reinterpret_cast<size_t *>(p); p += sizeof(size_t);
+
+            int32_t type = get_camera_metadata_tag_type(tag);
+            switch (type) {
+                case TYPE_BYTE: {
+                    add_camera_metadata_entry(aCamera->characteristics,
+                                              tag,
+                                              p,
+                                              count);
+                    p += count * sizeof(uint8_t);
+                    break;
+                }
+                case TYPE_INT32: {
+                    add_camera_metadata_entry(aCamera->characteristics,
+                                              tag,
+                                              p,
+                                              count);
+                    p += count * sizeof(int32_t);
+                    break;
+                }
+                case TYPE_FLOAT: {
+                    add_camera_metadata_entry(aCamera->characteristics,
+                                              tag,
+                                              p,
+                                              count);
+                    p += count * sizeof(float);
+                    break;
+                }
+                case TYPE_INT64: {
+                    add_camera_metadata_entry(aCamera->characteristics,
+                                              tag,
+                                              p,
+                                              count);
+                    p += count * sizeof(int64_t);
+                    break;
+                }
+                case TYPE_DOUBLE: {
+                    add_camera_metadata_entry(aCamera->characteristics,
+                                              tag,
+                                              p,
+                                              count);
+                    p += count * sizeof(double);
+                    break;
+                }
+                case TYPE_RATIONAL:
+                    p += count * sizeof(camera_metadata_rational_t);
+                    break;
+                default:
+                    ALOGW("Type %d is unknown; data may be corrupted", type);
+                    break;
+            }
+        }
+
+        mCameraInfo.insert_or_assign(cameraId, std::move(aCamera));
+    }
+
+
 
     /* read number of camera information entries */
     size_t ncams = *(reinterpret_cast<size_t *>(p)); p += sizeof(size_t);
-    size_t sz    = *(reinterpret_cast<size_t *>(p)); p += sizeof(size_t);
 
     /* read each camera information entry */
-    unique_lock<mutex> lock(mConfigLock);
-    mIsReady = false;
     for (auto cidx = 0; cidx < ncams; ++cidx) {
         /* read camera identifier */
         string cameraId = *(reinterpret_cast<string *>(p)); p += sizeof(string);
@@ -547,27 +702,16 @@
         }
         p = reinterpret_cast<char *>(ptr);
 
-        /* frame rates */
-        sz = *(reinterpret_cast<size_t *>(p)); p += sizeof(size_t);
-        int32_t *i32_ptr = reinterpret_cast<int32_t *>(p);
-        for (auto idx = 0; idx < sz; ++idx) {
-            aCamera->frameRates.emplace(*i32_ptr++);
-        }
-        p = reinterpret_cast<char *>(i32_ptr);
-
         /* stream configurations */
         sz = *(reinterpret_cast<size_t *>(p)); p += sizeof(size_t);
-        i32_ptr = reinterpret_cast<int32_t *>(p);
+        int32_t *i32_ptr = reinterpret_cast<int32_t *>(p);
         for (auto idx = 0; idx < sz; ++idx) {
             const int32_t id = *i32_ptr++;
 
             std::array<int32_t, kStreamCfgSz> temp;
-            temp[0] = *i32_ptr++;
-            temp[1] = *i32_ptr++;
-            temp[2] = *i32_ptr++;
-            temp[3] = *i32_ptr++;
-            temp[4] = *i32_ptr++;
-            temp[5] = *i32_ptr++;
+            for (auto i = 0; i < kStreamCfgSz; ++i) {
+                temp[i] = *i32_ptr++;
+            }
             aCamera->streamConfigurations.insert_or_assign(id, temp);
         }
         p = reinterpret_cast<char *>(i32_ptr);
@@ -661,10 +805,113 @@
     /* lock a configuration data while it's being written to the filesystem */
     lock_guard<mutex> lock(mConfigLock);
 
-    size_t sz = mCameraInfo.size();
+    /* write camera group information */
+    size_t sz = mCameraGroups.size();
     outFile.write(reinterpret_cast<const char *>(&sz),
                   sizeof(size_t));
-    for (auto &[camId, camInfo] : mCameraInfo) {
+    for (auto&& [camId, camInfo] : mCameraGroups) {
+        ALOGI("Storing camera group %s", camId.c_str());
+
+        /* write a camera identifier string */
+        outFile.write(reinterpret_cast<const char *>(&camId),
+                      sizeof(string));
+
+        /* controls */
+        sz = camInfo->controls.size();
+        outFile.write(reinterpret_cast<const char *>(&sz),
+                      sizeof(size_t));
+        for (auto&& [ctrl, range] : camInfo->controls) {
+            outFile.write(reinterpret_cast<const char *>(&ctrl),
+                          sizeof(CameraParam));
+            outFile.write(reinterpret_cast<const char *>(&get<0>(range)),
+                          sizeof(int32_t));
+            outFile.write(reinterpret_cast<const char *>(&get<1>(range)),
+                          sizeof(int32_t));
+            outFile.write(reinterpret_cast<const char *>(&get<2>(range)),
+                          sizeof(int32_t));
+        }
+
+        /* stream configurations */
+        sz = camInfo->streamConfigurations.size();
+        outFile.write(reinterpret_cast<const char *>(&sz),
+                      sizeof(size_t));
+        for (auto&& [sid, cfg] : camInfo->streamConfigurations) {
+            outFile.write(reinterpret_cast<const char *>(&sid),
+                          sizeof(int32_t));
+            for (int idx = 0; idx < kStreamCfgSz; ++idx) {
+                outFile.write(reinterpret_cast<const char *>(&cfg[idx]),
+                              sizeof(int32_t));
+            }
+        }
+
+        /* synchronization */
+        outFile.write(reinterpret_cast<const char *>(&camInfo->synchronized),
+                      sizeof(int32_t));
+
+        /* size of camera_metadata_t */
+        size_t num_entry = 0;
+        size_t num_data  = 0;
+        if (camInfo->characteristics != nullptr) {
+            num_entry = get_camera_metadata_entry_count(camInfo->characteristics);
+            num_data  = get_camera_metadata_data_count(camInfo->characteristics);
+        }
+        outFile.write(reinterpret_cast<const char *>(&num_entry),
+                      sizeof(size_t));
+        outFile.write(reinterpret_cast<const char *>(&num_data),
+                      sizeof(size_t));
+
+        /* write each camera metadata entry */
+        if (num_entry > 0) {
+            camera_metadata_entry_t entry;
+            for (auto idx = 0; idx < num_entry; ++idx) {
+                if (get_camera_metadata_entry(camInfo->characteristics, idx, &entry)) {
+                    ALOGE("Failed to retrieve camera metadata entry %d", idx);
+                    outFile.close();
+                    return false;
+                }
+
+                outFile.write(reinterpret_cast<const char *>(&entry.tag),
+                              sizeof(entry.tag));
+                outFile.write(reinterpret_cast<const char *>(&entry.count),
+                              sizeof(entry.count));
+
+                int32_t type = get_camera_metadata_tag_type(entry.tag);
+                switch (type) {
+                    case TYPE_BYTE:
+                        outFile.write(reinterpret_cast<const char *>(entry.data.u8),
+                                      sizeof(uint8_t) * entry.count);
+                        break;
+                    case TYPE_INT32:
+                        outFile.write(reinterpret_cast<const char *>(entry.data.i32),
+                                      sizeof(int32_t) * entry.count);
+                        break;
+                    case TYPE_FLOAT:
+                        outFile.write(reinterpret_cast<const char *>(entry.data.f),
+                                      sizeof(float) * entry.count);
+                        break;
+                    case TYPE_INT64:
+                        outFile.write(reinterpret_cast<const char *>(entry.data.i64),
+                                      sizeof(int64_t) * entry.count);
+                        break;
+                    case TYPE_DOUBLE:
+                        outFile.write(reinterpret_cast<const char *>(entry.data.d),
+                                      sizeof(double) * entry.count);
+                        break;
+                    case TYPE_RATIONAL:
+                        [[fallthrough]];
+                    default:
+                        ALOGW("Type %d is not supported", type);
+                        break;
+                }
+            }
+        }
+    }
+
+    /* write camera device information */
+    sz = mCameraInfo.size();
+    outFile.write(reinterpret_cast<const char *>(&sz),
+                  sizeof(size_t));
+    for (auto&& [camId, camInfo] : mCameraInfo) {
         ALOGI("Storing camera %s", camId.c_str());
 
         /* write a camera identifier string */
@@ -686,20 +933,11 @@
                           sizeof(int32_t));
         }
 
-        /* frame rates */
-        sz = camInfo->frameRates.size();
-        outFile.write(reinterpret_cast<const char *>(&sz),
-                      sizeof(size_t));
-        for (auto fps : camInfo->frameRates) {
-            outFile.write(reinterpret_cast<const char *>(&fps),
-                          sizeof(int32_t));
-        }
-
         /* stream configurations */
         sz = camInfo->streamConfigurations.size();
         outFile.write(reinterpret_cast<const char *>(&sz),
                       sizeof(size_t));
-        for (auto &[sid, cfg] : camInfo->streamConfigurations) {
+        for (auto&& [sid, cfg] : camInfo->streamConfigurations) {
             outFile.write(reinterpret_cast<const char *>(sid),
                           sizeof(int32_t));
             for (int idx = 0; idx < kStreamCfgSz; ++idx) {
@@ -777,16 +1015,6 @@
 }
 
 
-void ConfigManager::addCameraDevices(const char *devices,
-                                     unique_ptr<CameraGroup> &aGroup) {
-    stringstream device_list(devices);
-    string token;
-    while (getline(device_list, token, ',')) {
-        aGroup->devices.emplace(token);
-    }
-}
-
-
 std::unique_ptr<ConfigManager> ConfigManager::Create(const char *path) {
     unique_ptr<ConfigManager> cfgMgr(new ConfigManager(path));
 
diff --git a/evs/sampleDriver/ConfigManager.h b/evs/sampleDriver/ConfigManager.h
index 162d2f3..88ecce1 100644
--- a/evs/sampleDriver/ConfigManager.h
+++ b/evs/sampleDriver/ConfigManager.h
@@ -82,9 +82,6 @@
         unordered_map<CameraParam,
                       tuple<int32_t, int32_t, int32_t>> controls;
 
-        /* List of supported frame rates */
-        unordered_set<int32_t> frameRates;
-
         /*
          * List of supported output stream configurations; each array stores
          * format, width, height, and direction values in the order.
@@ -96,27 +93,21 @@
          * data and number of elements
          */
         unordered_map<camera_metadata_tag_t,
-                      pair<unique_ptr<void *>, size_t>> cameraMetadata;
+                      pair<void *, size_t>> cameraMetadata;
 
         /* Camera module characteristics */
         camera_metadata_t *characteristics;
     };
 
-    class CameraGroup {
+    class CameraGroupInfo : public CameraInfo {
     public:
-        CameraGroup() {}
+        CameraGroupInfo() {}
 
         /* ID of member camera devices */
         unordered_set<string> devices;
 
-        /* The capture operation of member camera devices are synchronized */
+        /* How the capture operations of member camera devices are synchronized */
-        bool synchronized = false;
-
-        /*
-         * List of stream configurations that are supposed by all camera devices
-         * in this group.
-         */
-        unordered_map<int32_t, RawStreamConfiguration> streamConfigurations;
+        int32_t synchronized = 0;
     };
 
     class SystemInfo {
@@ -147,19 +138,19 @@
     }
 
     /*
-     * Return a list of cameras
+     * Return a list of camera identifiers
      *
      * This function assumes that it is not being called frequently.
      *
      * @return vector<string>
      *         A vector that contains unique camera device identifiers.
      */
-    vector<string> getCameraList() {
+    vector<string> getCameraIdList() {
         unique_lock<mutex> lock(mConfigLock);
         mConfigCond.wait(lock, [this] { return mIsReady; });
 
         vector<string> aList;
-        for (auto &v : mCameraInfo) {
+        for (auto&& v : mCameraInfo) {
             aList.emplace_back(v.first);
         }
 
@@ -167,12 +158,32 @@
     }
 
     /*
-     * Return a list of cameras
+     * Return a list of camera group identifiers
+     *
+     * This function assumes that it is not being called frequently.
+     *
+     * @return vector<string>
+     *         A vector that contains unique camera group identifiers.
+     */
+    vector<string> getCameraGroupIdList() {
+        unique_lock<mutex> lock(mConfigLock);
+        mConfigCond.wait(lock, [this] { return mIsReady; });
+
+        vector<string> aList;
+        for (auto&& v : mCameraGroups) {
+            aList.emplace_back(v.first);
+        }
+
+        return aList;
+    }
+
+    /*
+     * Return a pointer to the camera group
      *
-     * @return CameraGroup
+     * @return CameraGroupInfo
      *         A pointer to a camera group identified by a given id.
      */
-    unique_ptr<CameraGroup>& getCameraGroup(const string& gid) {
+    unique_ptr<CameraGroupInfo>& getCameraGroupInfo(const string& gid) {
         unique_lock<mutex> lock(mConfigLock);
         mConfigCond.wait(lock, [this] { return mIsReady; });
 
@@ -224,7 +235,7 @@
     unordered_map<string, unique_ptr<DisplayInfo>> mDisplayInfo;
 
-    /* Camera groups are stored in <groud id, CameraGroup> hash map */
+    /* Camera groups are stored in a <group id, CameraGroupInfo> hash map */
-    unordered_map<string, unique_ptr<CameraGroup>> mCameraGroups;
+    unordered_map<string, unique_ptr<CameraGroupInfo>> mCameraGroups;
 
     /*
      * Camera positions are stored in <position, camera id set> hash map.
@@ -288,16 +299,19 @@
     /*
      * read camera device information
      *
-     * @param  aDeviceElem
+     * @param  aCamera
+     *         A pointer to a CameraInfo object that will be filled in by
+     *         this method.
+     * @param  aDeviceElem
      *         A pointer to "device" XML element that contains camera module
      *         capability info and its characteristics.
      *
-     * @return unique_ptr<CameraInfo>
-     *         A pointer to CameraInfo class that contains camera module
-     *         capability and characteristics.  Please note that this transfers
-     *         the ownership of created CameraInfo to the caller.
+     * @return bool
+     *         Return false upon any failure in reading and processing camera
+     *         device information.
      */
-    unique_ptr<CameraInfo> readCameraDeviceInfo(const XMLElement *aDeviceElem);
+    bool readCameraDeviceInfo(CameraInfo *aCamera,
+                              const XMLElement *aDeviceElem);
 
     /*
      * read camera metadata
@@ -315,7 +329,7 @@
      *         Number of camera metadata entries
      */
     size_t readCameraCapabilities(const XMLElement * const aCapElem,
-                                  unique_ptr<CameraInfo> &aCamera,
+                                  CameraInfo *aCamera,
                                   size_t &dataSize);
 
     /*
@@ -333,7 +347,7 @@
      *         Number of camera metadata entries
      */
     size_t readCameraMetadata(const XMLElement * const aParamElem,
-                              unique_ptr<CameraInfo> &aCamera,
+                              CameraInfo *aCamera,
                               size_t &dataSize);
 
     /*
@@ -351,23 +365,11 @@
      *         or its size is not large enough to add all found camera metadata
      *         entries.
      */
-    bool constructCameraMetadata(unique_ptr<CameraInfo> &aCamera,
+    bool constructCameraMetadata(CameraInfo *aCamera,
                                  const size_t totalEntries,
                                  const size_t totalDataSize);
 
     /*
-     * parse a comma-separated list of camera devices and add them to
-     * CameraGroup.
-     *
-     * @param  devices
-     *         A comma-separated list of camera device identifiers.
-     * @param  aGroup
-     *         Camera group which cameras will be added to.
-     */
-    void addCameraDevices(const char *devices,
-                          unique_ptr<CameraGroup> &aGroup);
-
-    /*
      * Read configuration data from the binary file
      *
      * @return bool
diff --git a/evs/sampleDriver/ConfigManagerUtil.cpp b/evs/sampleDriver/ConfigManagerUtil.cpp
index 8206daa..d10f236 100644
--- a/evs/sampleDriver/ConfigManagerUtil.cpp
+++ b/evs/sampleDriver/ConfigManagerUtil.cpp
@@ -90,6 +90,30 @@
         aTag =  ANDROID_LENS_POSE_ROTATION;
     } else if (!strcmp(name, "LENS_POSE_TRANSLATION")) {
         aTag =  ANDROID_LENS_POSE_TRANSLATION;
+    } else if (!strcmp(name, "REQUEST_AVAILABLE_CAPABILITIES")) {
+        aTag =  ANDROID_REQUEST_AVAILABLE_CAPABILITIES;
+    } else if (!strcmp(name, "LOGICAL_MULTI_CAMERA_PHYSICAL_IDS")) {
+        aTag =  ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS;
+    } else {
+        return false;
+    }
+
+    return true;
+}
+
+
+bool ConfigManagerUtil::convertToCameraCapability(
+    const char *name,
+    camera_metadata_enum_android_request_available_capabilities_t &cap) {
+
+    if (!strcmp(name, "DEPTH_OUTPUT")) {
+        cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT;
+    } else if (!strcmp(name, "LOGICAL_MULTI_CAMERA")) {
+        cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA;
+    } else if (!strcmp(name, "MONOCHROME")) {
+        cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME;
+    } else if (!strcmp(name, "SECURE_IMAGE_DATA")) {
+        cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA;
     } else {
         return false;
     }
diff --git a/evs/sampleDriver/ConfigManagerUtil.h b/evs/sampleDriver/ConfigManagerUtil.h
index 8c89ae7..1710cac 100644
--- a/evs/sampleDriver/ConfigManagerUtil.h
+++ b/evs/sampleDriver/ConfigManagerUtil.h
@@ -55,6 +55,14 @@
      */
     static string trimString(const string &src,
                              const string &ws = " \n\r\t\f\v");
+
+    /**
+     * Convert a given string to corresponding camera capabilities
+     */
+    static bool convertToCameraCapability(
+        const char *name,
+        camera_metadata_enum_android_request_available_capabilities_t &cap);
+
 };
 
 #endif // CONFIG_MANAGER_UTIL_H
diff --git a/evs/sampleDriver/EvsEnumerator.cpp b/evs/sampleDriver/EvsEnumerator.cpp
index 70cbaec..ac014a0 100644
--- a/evs/sampleDriver/EvsEnumerator.cpp
+++ b/evs/sampleDriver/EvsEnumerator.cpp
@@ -114,7 +114,18 @@
             } else if (cmd_addition) {
                 // NOTE: we are here adding new device without a validation
                 // because it always fails to open, b/132164956.
-                sCameraList.emplace(devpath, devpath.c_str());
+                CameraRecord cam(devpath.c_str());
+                if (sConfigManager != nullptr) {
+                    unique_ptr<ConfigManager::CameraInfo> &camInfo =
+                        sConfigManager->getCameraInfo(devpath);
+                    if (camInfo != nullptr) {
+                        cam.desc.metadata.setToExternal(
+                            (uint8_t *)camInfo->characteristics,
+                             get_camera_metadata_size(camInfo->characteristics)
+                        );
+                    }
+                }
+                sCameraList.emplace(devpath, cam);
                 ALOGI("%s is added", devpath.c_str());
             } else {
                 // Ignore all other actions including "change".
@@ -131,19 +142,13 @@
 EvsEnumerator::EvsEnumerator() {
     ALOGD("EvsEnumerator created");
 
-    std::thread initCfgMgr;
     if (sConfigManager == nullptr) {
-        /* loads and initializes ConfigManager in a separate thread */
+        /* load and initialize ConfigManager synchronously */
-        initCfgMgr = std::thread([](){
-            sConfigManager =
-                ConfigManager::Create("/etc/automotive/evs/evs_sample_configuration.xml");
-        });
+        sConfigManager =
+            ConfigManager::Create("/etc/automotive/evs/evs_sample_configuration.xml");
     }
 
     enumerateDevices();
-    if (initCfgMgr.joinable()) {
-        initCfgMgr.join();
-    }
 }
 
 void EvsEnumerator::enumerateDevices() {
diff --git a/evs/sampleDriver/EvsV4lCamera.cpp b/evs/sampleDriver/EvsV4lCamera.cpp
index 6f7eb89..0ef09a7 100644
--- a/evs/sampleDriver/EvsV4lCamera.cpp
+++ b/evs/sampleDriver/EvsV4lCamera.cpp
@@ -233,7 +233,7 @@
         // V1.1 client is waiting on STREAM_STOPPED event.
         std::unique_lock <std::mutex> lock(mAccessLock);
 
-        EvsEvent event;
+        EvsEventDesc event;
         event.aType = EvsEventType::STREAM_STOPPED;
         auto result = mStream_1_1->notify(event);
         if (!result.isOk()) {
@@ -294,10 +294,14 @@
 }
 
 
-Return<EvsResult> EvsV4lCamera::doneWithFrame_1_1(const BufferDesc_1_1& buffer)  {
-    ALOGD("doneWithFrame");
+Return<EvsResult> EvsV4lCamera::doneWithFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffers)  {
+    ALOGD("%s", __FUNCTION__);
 
-    return doneWithFrame_impl(buffer.bufferId, buffer.buffer.nativeHandle);
+    for (auto&& buffer : buffers) {
+        doneWithFrame_impl(buffer.bufferId, buffer.buffer.nativeHandle);
+    }
+
+    return EvsResult::OK;
 }
 
 
@@ -367,8 +371,10 @@
 Return<void> EvsV4lCamera::setIntParameter(CameraParam id, int32_t value,
                                            setIntParameter_cb _hidl_cb) {
     uint32_t v4l2cid = V4L2_CID_BASE;
+    hidl_vec<int32_t> values;
+    values.resize(1);
     if (!convertToV4l2CID(id, v4l2cid)) {
-        _hidl_cb(EvsResult::INVALID_ARG, 0);
+        _hidl_cb(EvsResult::INVALID_ARG, values);
     } else {
         EvsResult result = EvsResult::OK;
         v4l2_control control = {v4l2cid, value};
@@ -377,7 +383,8 @@
             result = EvsResult::UNDERLYING_SERVICE_ERROR;
         }
 
-        _hidl_cb(result, control.value);
+        values[0] = control.value;
+        _hidl_cb(result, values);
     }
 
     return Void();
@@ -387,8 +394,10 @@
 Return<void> EvsV4lCamera::getIntParameter(CameraParam id,
                                            getIntParameter_cb _hidl_cb) {
     uint32_t v4l2cid = V4L2_CID_BASE;
+    hidl_vec<int32_t> values;
+    values.resize(1);
     if (!convertToV4l2CID(id, v4l2cid)) {
-        _hidl_cb(EvsResult::INVALID_ARG, 0);
+        _hidl_cb(EvsResult::INVALID_ARG, values);
     } else {
         EvsResult result = EvsResult::OK;
         v4l2_control control = {v4l2cid, 0};
@@ -397,7 +406,8 @@
         }
 
         // Report a result
-        _hidl_cb(result, control.value);
+        values[0] = control.value;
+        _hidl_cb(result, values);
     }
 
     return Void();
@@ -570,7 +580,7 @@
 
 
 // This is the async callback from the video camera that tells us a frame is ready
-void EvsV4lCamera::forwardFrame(imageBuffer* /*pV4lBuff*/, void* pData) {
+void EvsV4lCamera::forwardFrame(imageBuffer* pV4lBuff, void* pData) {
     bool readyForFrame = false;
     size_t idx = 0;
 
@@ -620,6 +630,10 @@
         pDesc->stride = mStride;
         bufDesc_1_1.buffer.nativeHandle = mBuffers[idx].handle;
         bufDesc_1_1.bufferId = idx;
+        bufDesc_1_1.deviceId = mDescription.v1.cameraId;
+        // timestamp in microseconds.
+        bufDesc_1_1.timestamp =
+            pV4lBuff->timestamp.tv_sec * 1e+6 + pV4lBuff->timestamp.tv_usec;
 
         // Lock our output buffer for writing
         void *targetPixels = nullptr;
@@ -650,7 +664,10 @@
         // the lock
         bool flag = false;
         if (mStream_1_1 != nullptr) {
-            auto result = mStream_1_1->deliverFrame_1_1(bufDesc_1_1);
+            hidl_vec<BufferDesc_1_1> frames;
+            frames.resize(1);
+            frames[0] = bufDesc_1_1;
+            auto result = mStream_1_1->deliverFrame_1_1(frames);
             flag = result.isOk();
         } else {
             BufferDesc_1_0 bufDesc_1_0 = {
diff --git a/evs/sampleDriver/EvsV4lCamera.h b/evs/sampleDriver/EvsV4lCamera.h
index 9f40b6a..89205c1 100644
--- a/evs/sampleDriver/EvsV4lCamera.h
+++ b/evs/sampleDriver/EvsV4lCamera.h
@@ -66,7 +66,7 @@
     Return<void>      getCameraInfo_1_1(getCameraInfo_1_1_cb _hidl_cb)  override;
     Return<EvsResult> pauseVideoStream() override;
     Return<EvsResult> resumeVideoStream() override;
-    Return<EvsResult> doneWithFrame_1_1(const BufferDesc_1_1& buffer) override;
+    Return<EvsResult> doneWithFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffer) override;
     Return<EvsResult> setMaster() override;
     Return<EvsResult> forceMaster(const sp<IEvsDisplay>&) override;
     Return<EvsResult> unsetMaster() override;
diff --git a/evs/sampleDriver/VideoCapture.cpp b/evs/sampleDriver/VideoCapture.cpp
index ac8266a..9be55cc 100644
--- a/evs/sampleDriver/VideoCapture.cpp
+++ b/evs/sampleDriver/VideoCapture.cpp
@@ -179,6 +179,7 @@
     ALOGI("Buffer description:");
     ALOGI("  offset: %d", mBufferInfo.m.offset);
     ALOGI("  length: %d", mBufferInfo.length);
+    ALOGI("  flags : 0x%X", mBufferInfo.flags);
 
     // Get a pointer to the buffer contents by mapping into our address space
     mPixelBuffer = mmap(
diff --git a/evs/sampleDriver/resources/evs_configuration.dtd b/evs/sampleDriver/resources/evs_configuration.dtd
index a6aa5ca..d6be018 100644
--- a/evs/sampleDriver/resources/evs_configuration.dtd
+++ b/evs/sampleDriver/resources/evs_configuration.dtd
@@ -39,9 +39,9 @@
                              a member of the group.
          @attr device_id   : Comma-separated list of unique camera identifiers of member camera
                              devices.
-         @attr synchronized: Boolean field that tells whether or not this camera is synchronized
-                             with other cameras in the same group.  This will be ineffective if
-                             there is a single camera in the group or a group is invalid.
+         @attr synchronized: NONE if cameras are not synchronized.
+                             CALIBRATED if cameras are synchronized by hardware.
+                             APPROXIMATE if cameras are synchronized by other means.
     -->
     <!ELEMENT group (caps)>
     <!ATTLIST group
diff --git a/evs/sampleDriver/resources/evs_sample_configuration.xml b/evs/sampleDriver/resources/evs_sample_configuration.xml
index 225d51d..2141c39 100644
--- a/evs/sampleDriver/resources/evs_sample_configuration.xml
+++ b/evs/sampleDriver/resources/evs_sample_configuration.xml
@@ -31,13 +31,33 @@
     <!-- camera device information -->
     <camera>
         <!-- camera group 0 -->
-        <group group_id='group1'
-               device_id='/dev/video1,/dev/video2'
-               synchronized='false'
-        >
+        <group id='group0' synchronized='CALIBRATED'>
             <caps>
+                <!-- list of controls supported by all physical devices -->
+                <supported_controls>
+                    <control name='BRIGHTNESS' min='0' max='255'/>
+                    <control name='CONTRAST' min='0' max='255'/>
+                </supported_controls>
+
+                <!-- list of stream configurations supported by all physical devices -->
                 <stream id='0' width='960' height='540' format='RGBA_8888' framerate='30'/>
             </caps>
+
+            <!-- list of parameters -->
+            <characteristics>
+                <parameter
+                    name='REQUEST_AVAILABLE_CAPABILITIES'
+                    type='enum'
+                    size='1'
+                    value='LOGICAL_MULTI_CAMERA'
+                />
+                <parameter
+                    name='LOGICAL_MULTI_CAMERA_PHYSICAL_IDS'
+                    type='byte[]'
+                    size='2'
+                    value='/dev/video1,/dev/video2'
+                />
+            </characteristics>
         </group>
 
         <!-- camera device starts -->