Add support for frame timestamps from QEMU

Test: Manual Test of Camera app against emu-master-dev and released QEMU
builds.  Manual test of camera/sensor sample app verifying matching
timestamps.

Change-Id: I567f2cd65688487f89f6c262148008789167414c
diff --git a/camera/CallbackNotifier.cpp b/camera/CallbackNotifier.cpp
index 865a34e..e7fea45 100755
--- a/camera/CallbackNotifier.cpp
+++ b/camera/CallbackNotifier.cpp
@@ -241,9 +241,11 @@
         const size_t frameSize = camera_dev->getVideoFrameBufferSize();
         camera_memory_t* cam_buff = mGetMemoryCB(-1, frameSize, 1, mCBOpaque);
         if (NULL != cam_buff && NULL != cam_buff->data) {
-            camera_dev->getCurrentFrame(cam_buff->data, V4L2_PIX_FMT_YUV420);
-            mDataCBTimestamp(timestamp, CAMERA_MSG_VIDEO_FRAME,
-                               cam_buff, 0, mCBOpaque);
+            int64_t frame_timestamp = 0L;
+            camera_dev->getCurrentFrame(cam_buff->data, V4L2_PIX_FMT_YUV420,
+                                        &frame_timestamp);
+            mDataCBTimestamp(frame_timestamp != 0L ? frame_timestamp : timestamp,
+                             CAMERA_MSG_VIDEO_FRAME, cam_buff, 0, mCBOpaque);
             mCameraMemoryTs.push_back( cam_buff );
         } else {
             ALOGE("%s: Memory failure in CAMERA_MSG_VIDEO_FRAME", __FUNCTION__);
@@ -254,8 +256,10 @@
         camera_memory_t* cam_buff =
             mGetMemoryCB(-1, camera_dev->getFrameBufferSize(), 1, mCBOpaque);
         if (NULL != cam_buff && NULL != cam_buff->data) {
+            int64_t frame_timestamp = 0L;
             camera_dev->getCurrentFrame(cam_buff->data,
-                                        camera_dev->getOriginalPixelFormat());
+                                        camera_dev->getOriginalPixelFormat(),
+                                        &frame_timestamp);
             mDataCB(CAMERA_MSG_PREVIEW_FRAME, cam_buff, 0, NULL, mCBOpaque);
             cam_buff->release(cam_buff);
         } else {
diff --git a/camera/EmulatedCameraDevice.cpp b/camera/EmulatedCameraDevice.cpp
index e2d9412..31e2aff 100755
--- a/camera/EmulatedCameraDevice.cpp
+++ b/camera/EmulatedCameraDevice.cpp
@@ -221,7 +221,8 @@
 }
 
 status_t EmulatedCameraDevice::getCurrentFrame(void* buffer,
-                                               uint32_t pixelFormat)
+                                               uint32_t pixelFormat,
+                                               int64_t* timestamp)
 {
     if (!isStarted()) {
         ALOGE("%s: Device is not started", __FUNCTION__);
@@ -238,12 +239,18 @@
         ALOGE("%s: No framebuffer", __FUNCTION__);
         return EINVAL;
     }
+
+    if (timestamp != nullptr) {
+        *timestamp = mCameraThread->getPrimaryTimestamp();
+    }
+
     return getCurrentFrameImpl(reinterpret_cast<const uint8_t*>(source),
                                reinterpret_cast<uint8_t*>(buffer),
                                pixelFormat);
 }
 
-status_t EmulatedCameraDevice::getCurrentPreviewFrame(void* buffer)
+status_t EmulatedCameraDevice::getCurrentPreviewFrame(void* buffer,
+                                                      int64_t* timestamp)
 {
     if (!isStarted()) {
         ALOGE("%s: Device is not started", __FUNCTION__);
@@ -261,6 +268,10 @@
         return EINVAL;
     }
 
+    if (timestamp != nullptr) {
+        *timestamp = mCameraThread->getPrimaryTimestamp();
+    }
+
     /* In emulation the framebuffer is never RGB. */
     switch (mPixelFormat) {
         case V4L2_PIX_FMT_YVU420:
@@ -461,6 +472,13 @@
     return nullptr;
 }
 
+int64_t EmulatedCameraDevice::CameraThread::getPrimaryTimestamp() const {
+    if (mFrameProducer.get()) {
+        return mFrameProducer->getPrimaryTimestamp();
+    }
+    return 0L;
+}
+
 void EmulatedCameraDevice::CameraThread::lockPrimaryBuffer() {
     mFrameProducer->lockPrimaryBuffer();
 }
@@ -549,6 +567,8 @@
       mOpaque(opaque),
       mPrimaryBuffer(primaryBuffer),
       mSecondaryBuffer(secondaryBuffer),
+      mPrimaryTimestamp(0L),
+      mSecondaryTimestamp(0L),
       mLastFrame(0),
       mHasFrame(false) {
 
@@ -559,6 +579,11 @@
     return mPrimaryBuffer;
 }
 
+int64_t
+EmulatedCameraDevice::CameraThread::FrameProducer::getPrimaryTimestamp() const {
+    return mPrimaryTimestamp;
+}
+
 void EmulatedCameraDevice::CameraThread::FrameProducer::lockPrimaryBuffer() {
     mBufferMutex.lock();
 }
@@ -650,7 +675,7 @@
 
     // Produce one frame and place it in the secondary buffer
     mLastFrame = systemTime(SYSTEM_TIME_MONOTONIC);
-    if (!mProducer(mOpaque, mSecondaryBuffer)) {
+    if (!mProducer(mOpaque, mSecondaryBuffer, &mSecondaryTimestamp)) {
         ALOGE("FrameProducer could not produce frame, exiting thread");
         mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
         return false;
@@ -660,6 +685,7 @@
         // Switch buffers now that the secondary buffer is ready
         Mutex::Autolock lock(mBufferMutex);
         std::swap(mPrimaryBuffer, mSecondaryBuffer);
+        std::swap(mPrimaryTimestamp, mSecondaryTimestamp);
     }
     mHasFrame = true;
     return true;
diff --git a/camera/EmulatedCameraDevice.h b/camera/EmulatedCameraDevice.h
index 85422eb..af26d68 100755
--- a/camera/EmulatedCameraDevice.h
+++ b/camera/EmulatedCameraDevice.h
@@ -196,10 +196,13 @@
      *  pixelFormat - The pixel format to convert to, use
      *                getOriginalPixelFormat() to get the configured pixel
      *                format (if using this no conversion will be needed)
+     *  timestamp - Receives the timestamp at which the current frame was
+     *              generated.
      * Return:
      *  NO_ERROR on success, or an appropriate error status.
      */
-    virtual status_t getCurrentFrame(void* buffer, uint32_t pixelFormat);
+    virtual status_t getCurrentFrame(void* buffer, uint32_t pixelFormat,
+                                     int64_t* timestamp);
 
     /* Gets current framebuffer, converted into preview frame format.
      * This method must be called on a connected instance of this class with a
@@ -212,10 +215,12 @@
      * current frame be locked using a FrameLock object.
      * Param:
      *  buffer - Buffer, large enough to contain the entire preview frame.
+     *  timestamp - Receives the timestamp at which the preview frame was
+     *              generated.
      * Return:
      *  NO_ERROR on success, or an appropriate error status.
      */
-    virtual status_t getCurrentPreviewFrame(void* buffer);
+    virtual status_t getCurrentPreviewFrame(void* buffer, int64_t* timestamp);
 
     /* Gets a pointer to the current frame buffer in its raw format.
      * This method must be called on a connected instance of this class with a
@@ -450,7 +455,7 @@
      * a pointer to this method. The method is expected to know what size frames
      * it provided to the producer thread. Returning false indicates an
      * unrecoverable error that will stop the frame production thread. */
-    virtual bool produceFrame(void* buffer) = 0;
+    virtual bool produceFrame(void* buffer, int64_t* timestamp) = 0;
 
     /* Get the primary buffer to use when constructing the FrameProducer. */
     virtual void* getPrimaryBuffer() {
@@ -468,7 +473,9 @@
      * frame production and delivery but can't be blocking the camera HAL. */
     class CameraThread : public WorkerThread {
     public:
-        typedef bool (*ProduceFrameFunc)(void* opaque, void* destinationBuffer);
+        typedef bool (*ProduceFrameFunc)(void* opaque,
+                                         void* destinationBuffer,
+                                         int64_t* destinationTimestamp);
         CameraThread(EmulatedCameraDevice* cameraDevice,
                      ProduceFrameFunc producer,
                      void* producerOpaque);
@@ -479,6 +486,7 @@
          * without first having created a Lock can lead to contents changing
          * without notice. */
         const void* getPrimaryBuffer() const;
+        int64_t getPrimaryTimestamp() const;
 
         /* Lock and unlock the primary buffer */
         void lockPrimaryBuffer();
@@ -512,6 +520,7 @@
             bool hasFrame() const;
 
             const void* getPrimaryBuffer() const;
+            int64_t getPrimaryTimestamp() const;
 
             void lockPrimaryBuffer();
             void unlockPrimaryBuffer();
@@ -523,6 +532,8 @@
             void* mOpaque;
             void* mPrimaryBuffer;
             void* mSecondaryBuffer;
+            int64_t mPrimaryTimestamp;
+            int64_t mSecondaryTimestamp;
             nsecs_t mLastFrame;
             mutable Mutex mBufferMutex;
             std::atomic<bool> mHasFrame;
@@ -635,9 +646,10 @@
      */
     void unlockCurrentFrame();
 
-    static bool staticProduceFrame(void* opaque, void* buffer) {
+    static bool staticProduceFrame(void* opaque, void* buffer,
+                                   int64_t* timestamp) {
         auto cameraDevice = reinterpret_cast<EmulatedCameraDevice*>(opaque);
-        return cameraDevice->produceFrame(buffer);
+        return cameraDevice->produceFrame(buffer, timestamp);
     }
 
     /* A flag indicating if an auto-focus completion event should be sent the
diff --git a/camera/EmulatedFakeCameraDevice.cpp b/camera/EmulatedFakeCameraDevice.cpp
index d3cdd78..747c55a 100755
--- a/camera/EmulatedFakeCameraDevice.cpp
+++ b/camera/EmulatedFakeCameraDevice.cpp
@@ -203,7 +203,7 @@
  * Worker thread management overrides.
  ***************************************************************************/
 
-bool EmulatedFakeCameraDevice::produceFrame(void* buffer)
+bool EmulatedFakeCameraDevice::produceFrame(void* buffer, int64_t* timestamp)
 {
 #if EFCD_ROTATE_FRAME
     const int frame_type = rotateFrame();
@@ -221,6 +221,9 @@
 #else
     drawCheckerboard(buffer);
 #endif  // EFCD_ROTATE_FRAME
+    if (timestamp != nullptr) {
+        *timestamp = 0L;
+    }
     return true;
 }
 
diff --git a/camera/EmulatedFakeCameraDevice.h b/camera/EmulatedFakeCameraDevice.h
index a3e9201..f1f5252 100755
--- a/camera/EmulatedFakeCameraDevice.h
+++ b/camera/EmulatedFakeCameraDevice.h
@@ -85,7 +85,7 @@
 
 protected:
     /* Implementation of the frame production routine. */
-    bool produceFrame(void* buffer) override;
+    bool produceFrame(void* buffer, int64_t* timestamp) override;
 
     /****************************************************************************
      * Fake camera device private API
diff --git a/camera/EmulatedFakeRotatingCameraDevice.cpp b/camera/EmulatedFakeRotatingCameraDevice.cpp
index 47dc162..0e54b6c 100755
--- a/camera/EmulatedFakeRotatingCameraDevice.cpp
+++ b/camera/EmulatedFakeRotatingCameraDevice.cpp
@@ -520,7 +520,8 @@
  * Worker thread management overrides.
  ***************************************************************************/
 
-bool EmulatedFakeRotatingCameraDevice::produceFrame(void* buffer)
+bool EmulatedFakeRotatingCameraDevice::produceFrame(void* buffer,
+                                                    int64_t* timestamp)
 {
     if (mOpenglReady == false) {
         init_gl_surface(mFrameWidth, mFrameHeight);
diff --git a/camera/EmulatedFakeRotatingCameraDevice.h b/camera/EmulatedFakeRotatingCameraDevice.h
index f983564..33b4b32 100755
--- a/camera/EmulatedFakeRotatingCameraDevice.h
+++ b/camera/EmulatedFakeRotatingCameraDevice.h
@@ -73,7 +73,7 @@
 
 protected:
     /* Implementation of the frame production routine. */
-    bool produceFrame(void* buffer) override;
+    bool produceFrame(void* buffer, int64_t* timestamp) override;
 
     /****************************************************************************
      * Fake camera device private API
diff --git a/camera/EmulatedQemuCameraDevice.cpp b/camera/EmulatedQemuCameraDevice.cpp
index 6105d0d..0ff6df2 100755
--- a/camera/EmulatedQemuCameraDevice.cpp
+++ b/camera/EmulatedQemuCameraDevice.cpp
@@ -217,7 +217,8 @@
  ***************************************************************************/
 
 status_t EmulatedQemuCameraDevice::getCurrentFrame(void* buffer,
-                                                   uint32_t pixelFormat) {
+                                                   uint32_t pixelFormat,
+                                                   int64_t* timestamp) {
     if (!isStarted()) {
         ALOGE("%s: Device is not started", __FUNCTION__);
         return EINVAL;
@@ -236,12 +237,18 @@
         ALOGE("%s: No frame", __FUNCTION__);
         return EINVAL;
     }
+
+    if (timestamp != nullptr) {
+        *timestamp = mCameraThread->getPrimaryTimestamp();
+    }
+
     return getCurrentFrameImpl(reinterpret_cast<const uint8_t*>(frame),
                                reinterpret_cast<uint8_t*>(buffer),
                                pixelFormat);
 }
 
-status_t EmulatedQemuCameraDevice::getCurrentPreviewFrame(void* buffer) {
+status_t EmulatedQemuCameraDevice::getCurrentPreviewFrame(void* buffer,
+                                                          int64_t* timestamp) {
     if (!isStarted()) {
         ALOGE("%s: Device is not started", __FUNCTION__);
         return EINVAL;
@@ -260,6 +267,9 @@
         ALOGE("%s: No frame", __FUNCTION__);
         return EINVAL;
     }
+    if (timestamp != nullptr) {
+        *timestamp = mCameraThread->getPrimaryTimestamp();
+    }
     memcpy(buffer, previewFrame, mTotalPixels * 4);
     return NO_ERROR;
 }
@@ -280,7 +290,7 @@
  * Worker thread management overrides.
  ***************************************************************************/
 
-bool EmulatedQemuCameraDevice::produceFrame(void* buffer)
+bool EmulatedQemuCameraDevice::produceFrame(void* buffer, int64_t* timestamp)
 {
     auto frameBufferPair = reinterpret_cast<FrameBufferPair*>(buffer);
     uint8_t* rawFrame = frameBufferPair->first;
@@ -292,7 +302,8 @@
                                                  mWhiteBalanceScale[0],
                                                  mWhiteBalanceScale[1],
                                                  mWhiteBalanceScale[2],
-                                                 mExposureCompensation);
+                                                 mExposureCompensation,
+                                                 timestamp);
     if (query_res != NO_ERROR) {
         ALOGE("%s: Unable to get current video frame: %s",
              __FUNCTION__, strerror(query_res));
diff --git a/camera/EmulatedQemuCameraDevice.h b/camera/EmulatedQemuCameraDevice.h
index ed19f6c..1cabd6f 100755
--- a/camera/EmulatedQemuCameraDevice.h
+++ b/camera/EmulatedQemuCameraDevice.h
@@ -82,10 +82,12 @@
 public:
 
     /* Copy the current frame to |buffer| */
-    status_t getCurrentFrame(void* buffer, uint32_t pixelFormat) override;
+    status_t getCurrentFrame(void* buffer, uint32_t pixelFormat,
+                             int64_t* timestamp) override;
 
     /* Copy the current preview frame to |buffer| */
-    status_t getCurrentPreviewFrame(void* buffer) override;
+    status_t getCurrentPreviewFrame(void* buffer,
+                                    int64_t* timestamp) override;
 
     /* Get a pointer to the current frame, lock it first using FrameLock in
      * EmulatedCameraDevice class */
@@ -99,7 +101,7 @@
 
 protected:
     /* Implementation of the frame production routine. */
-    bool produceFrame(void* buffer) override;
+    bool produceFrame(void* buffer, int64_t* timestamp) override;
 
     void* getPrimaryBuffer() override;
     void* getSecondaryBuffer() override;
diff --git a/camera/PreviewWindow.cpp b/camera/PreviewWindow.cpp
index 607ad7d..4c7ee24 100755
--- a/camera/PreviewWindow.cpp
+++ b/camera/PreviewWindow.cpp
@@ -159,12 +159,14 @@
         return;
     }
 
+    int64_t frame_timestamp = 0L;
     /* Frames come in in YV12/NV12/NV21 format. Since preview window doesn't
      * supports those formats, we need to obtain the frame in RGB565. */
-    res = camera_dev->getCurrentPreviewFrame(img);
+    res = camera_dev->getCurrentPreviewFrame(img, &frame_timestamp);
     if (res == NO_ERROR) {
         /* Show it. */
-        mPreviewWindow->set_timestamp(mPreviewWindow, timestamp);
+        mPreviewWindow->set_timestamp(mPreviewWindow,
+                                      frame_timestamp != 0L ? frame_timestamp : timestamp);
         mPreviewWindow->enqueue_buffer(mPreviewWindow, buffer);
     } else {
         ALOGE("%s: Unable to obtain preview frame: %d", __FUNCTION__, res);
diff --git a/camera/QemuClient.cpp b/camera/QemuClient.cpp
index c769b29..f5cfe6b 100755
--- a/camera/QemuClient.cpp
+++ b/camera/QemuClient.cpp
@@ -508,15 +508,16 @@
                                       float r_scale,
                                       float g_scale,
                                       float b_scale,
-                                      float exposure_comp)
+                                      float exposure_comp,
+                                      int64_t* frame_time)
 {
     ALOGV("%s", __FUNCTION__);
 
     char query_str[256];
-    snprintf(query_str, sizeof(query_str), "%s video=%zu preview=%zu whiteb=%g,%g,%g expcomp=%g",
+    snprintf(query_str, sizeof(query_str), "%s video=%zu preview=%zu whiteb=%g,%g,%g expcomp=%g time=%d",
              mQueryFrame, (vframe && vframe_size) ? vframe_size : 0,
              (pframe && pframe_size) ? pframe_size : 0, r_scale, g_scale, b_scale,
-             exposure_comp);
+             exposure_comp, frame_time != nullptr ? 1 : 0);
     QemuQuery query(query_str);
     doQuery(&query);
     const status_t res = query.getCompletionStatus();
@@ -553,6 +554,14 @@
             return EINVAL;
         }
     }
+    if (frame_time != nullptr) {
+        if (query.mReplyDataSize >= cur_offset + 8) {
+            *frame_time = *reinterpret_cast<const int64_t*>(frame + cur_offset);
+            cur_offset += 8;
+        } else {
+            *frame_time = 0L;
+        }
+    }
 
     return NO_ERROR;
 }
diff --git a/camera/QemuClient.h b/camera/QemuClient.h
index 1a36f4b..ff39608 100755
--- a/camera/QemuClient.h
+++ b/camera/QemuClient.h
@@ -401,6 +401,7 @@
      *      interested only in video frame.
      *  r_scale, g_scale, b_scale - White balance scale.
      *  exposure_comp - Expsoure compensation.
+     *  frame_time - Receives the time at which the queried frame was produced.
      * Return:
      *  NO_ERROR on success, or an appropriate error status on failure.
      */
@@ -411,7 +412,8 @@
                         float r_scale,
                         float g_scale,
                         float b_scale,
-                        float exposure_comp);
+                        float exposure_comp,
+                        int64_t* frame_time);
 
     /****************************************************************************
      * Names of the queries available for the emulated camera.
diff --git a/camera/qemu-pipeline3/QemuSensor.cpp b/camera/qemu-pipeline3/QemuSensor.cpp
index 2408e7f..bf662ab 100644
--- a/camera/qemu-pipeline3/QemuSensor.cpp
+++ b/camera/qemu-pipeline3/QemuSensor.cpp
@@ -266,10 +266,8 @@
     mNextCapturedBuffers = nextBuffers;
 
     if (mNextCapturedBuffers != nullptr) {
-        if (listener != nullptr) {
-            listener->onQemuSensorEvent(frameNumber, QemuSensorListener::EXPOSURE_START,
-                                        mNextCaptureTime);
-        }
+
+        int64_t timestamp = 0L;
 
         // Might be adding more buffers, so size isn't constant.
         for (size_t i = 0; i < mNextCapturedBuffers->size(); ++i) {
@@ -280,10 +278,10 @@
                     b.buffer, b.img);
             switch (b.format) {
                 case HAL_PIXEL_FORMAT_RGB_888:
-                    captureRGB(b.img, b.width, b.height, b.stride);
+                    captureRGB(b.img, b.width, b.height, b.stride, &timestamp);
                     break;
                 case HAL_PIXEL_FORMAT_RGBA_8888:
-                    captureRGBA(b.img, b.width, b.height, b.stride);
+                    captureRGBA(b.img, b.width, b.height, b.stride, &timestamp);
                     break;
                 case HAL_PIXEL_FORMAT_BLOB:
                     if (b.dataSpace == HAL_DATASPACE_DEPTH) {
@@ -306,7 +304,7 @@
                     }
                     break;
                 case HAL_PIXEL_FORMAT_YCbCr_420_888:
-                    captureNV21(b.img, b.width, b.height, b.stride);
+                    captureNV21(b.img, b.width, b.height, b.stride, &timestamp);
                     break;
                 default:
                     ALOGE("%s: Unknown/unsupported format %x, no output",
@@ -314,6 +312,15 @@
                     break;
             }
         }
+        if (timestamp != 0L) {
+            mNextCaptureTime = timestamp;
+        }
+        // Note: we have to do this after the actual capture so that the
+        // capture time is accurate as reported from QEMU.
+        if (listener != nullptr) {
+            listener->onQemuSensorEvent(frameNumber, QemuSensorListener::EXPOSURE_START,
+                                        mNextCaptureTime);
+        }
     }
 
     ALOGVV("QemuSensor vertical blanking interval");
@@ -337,7 +344,7 @@
 };
 
 void QemuSensor::captureRGBA(uint8_t *img, uint32_t width, uint32_t height,
-        uint32_t stride) {
+        uint32_t stride, int64_t *timestamp) {
     status_t res;
     if (width != mLastRequestWidth || height != mLastRequestHeight) {
         ALOGI("%s: Dimensions for the current request (%dx%d) differ "
@@ -396,16 +403,16 @@
     // Read from webcam.
     mCameraQemuClient.queryFrame(nullptr, img, 0, bufferSize, whiteBalance[0],
             whiteBalance[1], whiteBalance[2],
-            exposureCompensation);
+            exposureCompensation, timestamp);
 
     ALOGVV("RGBA sensor image captured");
 }
 
-void QemuSensor::captureRGB(uint8_t *img, uint32_t width, uint32_t height, uint32_t stride) {
+void QemuSensor::captureRGB(uint8_t *img, uint32_t width, uint32_t height, uint32_t stride, int64_t *timestamp) {
     ALOGE("%s: Not implemented", __FUNCTION__);
 }
 
-void QemuSensor::captureNV21(uint8_t *img, uint32_t width, uint32_t height, uint32_t stride) {
+void QemuSensor::captureNV21(uint8_t *img, uint32_t width, uint32_t height, uint32_t stride, int64_t *timestamp) {
     status_t res;
     if (width != mLastRequestWidth || height != mLastRequestHeight) {
         ALOGI("%s: Dimensions for the current request (%dx%d) differ "
@@ -464,7 +471,7 @@
     // Read video frame from webcam.
     mCameraQemuClient.queryFrame(img, nullptr, bufferSize, 0, whiteBalance[0],
             whiteBalance[1], whiteBalance[2],
-            exposureCompensation);
+            exposureCompensation, timestamp);
 
     ALOGVV("NV21 sensor image captured");
 }
diff --git a/camera/qemu-pipeline3/QemuSensor.h b/camera/qemu-pipeline3/QemuSensor.h
index c67296d..a867d8a 100644
--- a/camera/qemu-pipeline3/QemuSensor.h
+++ b/camera/qemu-pipeline3/QemuSensor.h
@@ -187,11 +187,11 @@
     Buffers *mNextCapturedBuffers;
 
     void captureRGBA(uint8_t *img, uint32_t width, uint32_t height,
-            uint32_t stride);
+            uint32_t stride, int64_t *timestamp);
     void captureRGB(uint8_t *img, uint32_t width, uint32_t height,
-            uint32_t stride);
+            uint32_t stride, int64_t *timestamp);
     void captureNV21(uint8_t *img, uint32_t width, uint32_t height,
-            uint32_t stride);
+            uint32_t stride, int64_t *timestamp);
 };
 
 }; // end of namespace android
diff --git a/sensors/sensors_qemu.c b/sensors/sensors_qemu.c
index 0120f12..8755b37 100644
--- a/sensors/sensors_qemu.c
+++ b/sensors/sensors_qemu.c
@@ -288,6 +288,10 @@
     int64_t event_time = -1;
     int ret = 0;
 
+    int64_t guest_event_time = -1;
+    int has_guest_event_time = 0;
+
+
     for (;;) {
         /* Release the lock since we're going to block on recv() */
         pthread_mutex_unlock(&dev->lock);
@@ -432,6 +436,15 @@
             continue;
         }
 
+        /* "guest-sync:<time>" is sent after a series of sensor events,
+         * where 'time' is expressed in micro-seconds and corresponds
+         * to the VM time when the real poll occurred.
+         */
+        if (sscanf(buff, "guest-sync:%lld", &guest_event_time) == 1) {
+            has_guest_event_time = 1;
+            continue;
+        }
+
         /* "sync:<time>" is sent after a series of sensor events.
          * where 'time' is expressed in micro-seconds and corresponds
          * to the VM time when the real poll occured.
@@ -477,7 +490,8 @@
         while (new_sensors) {
             uint32_t i = 31 - __builtin_clz(new_sensors);
             new_sensors &= ~(1U << i);
-            dev->sensors[i].timestamp = t;
+            dev->sensors[i].timestamp =
+                    has_guest_event_time ? guest_event_time : t;
         }
     }
     return ret;
@@ -942,6 +956,11 @@
         dev->fd = -1;
         pthread_mutex_init(&dev->lock, NULL);
 
+        int64_t now = now_ns();
+        char command[64];
+        sprintf(command, "time:%lld", now);
+        sensor_device_send_command_locked(dev, command);
+
         *device = &dev->device.common;
         status  = 0;
     }