Add minimal operation to new fake camera emulator.
- Rework to match latest camera 2 HAL.
- Somewhat realistic scene lighting simulation, allowing a
realistic range of exposure and gain values.
- Supports raw Bayer sensor data only, at 640x480
- Only one stream configurable
- No support for reprocessing, 3A, flash, YUV, JPEG, etc
Bug: 6243944
Change-Id: I14fbad58ffeff37162c56d9daff7e4f75ab879c3
diff --git a/tools/emulator/system/camera/Android.mk b/tools/emulator/system/camera/Android.mk
index c51f621..dc2e624 100755
--- a/tools/emulator/system/camera/Android.mk
+++ b/tools/emulator/system/camera/Android.mk
@@ -33,6 +33,7 @@
libjpeg \
libskia \
libandroid_runtime \
+ libcamera_metadata
LOCAL_C_INCLUDES += external/jpeg \
external/skia/include/core/ \
@@ -57,7 +58,10 @@
JpegCompressor.cpp \
EmulatedCamera2.cpp \
EmulatedFakeCamera2.cpp \
- EmulatedQemuCamera2.cpp
+ EmulatedQemuCamera2.cpp \
+ fake-pipeline2/Scene.cpp \
+ fake-pipeline2/Sensor.cpp
+
ifeq ($(TARGET_PRODUCT),vbox_x86)
LOCAL_MODULE := camera.vbox_x86
diff --git a/tools/emulator/system/camera/EmulatedCamera2.cpp b/tools/emulator/system/camera/EmulatedCamera2.cpp
index f7672f4..4106639 100644
--- a/tools/emulator/system/camera/EmulatedCamera2.cpp
+++ b/tools/emulator/system/camera/EmulatedCamera2.cpp
@@ -48,36 +48,10 @@
ops = &sDeviceOps;
priv = this;
- mRequestQueueDstOps.notify_queue_not_empty =
- EmulatedCamera2::request_queue_notify_queue_not_empty;
- mRequestQueueDstOps.parent = this;
+ mNotifyCb = NULL;
- mRequestQueueDstOps.notify_queue_not_empty =
- EmulatedCamera2::reprocess_queue_notify_queue_not_empty;
- mReprocessQueueDstOps.parent = this;
-
- mFrameQueueSrcOps.buffer_count = EmulatedCamera2::frame_queue_buffer_count;
- mFrameQueueSrcOps.dequeue = EmulatedCamera2::frame_queue_dequeue;
- mFrameQueueSrcOps.free = EmulatedCamera2::frame_queue_free;
- mFrameQueueSrcOps.parent = this;
-
- mReprocessStreamOps.dequeue_buffer =
- EmulatedCamera2::reprocess_stream_dequeue_buffer;
- mReprocessStreamOps.enqueue_buffer =
- EmulatedCamera2::reprocess_stream_enqueue_buffer;
- mReprocessStreamOps.cancel_buffer =
- EmulatedCamera2::reprocess_stream_cancel_buffer;
- mReprocessStreamOps.set_buffer_count =
- EmulatedCamera2::reprocess_stream_set_buffer_count;
- mReprocessStreamOps.set_crop = EmulatedCamera2::reprocess_stream_set_crop;
- mReprocessStreamOps.set_timestamp =
- EmulatedCamera2::reprocess_stream_set_timestamp;
- mReprocessStreamOps.set_usage = EmulatedCamera2::reprocess_stream_set_usage;
- mReprocessStreamOps.get_min_undequeued_buffer_count =
- EmulatedCamera2::reprocess_stream_get_min_undequeued_buffer_count;
- mReprocessStreamOps.lock_buffer =
- EmulatedCamera2::reprocess_stream_lock_buffer;
- mReprocessStreamOps.parent = this;
+ mRequestQueueSrc = NULL;
+ mFrameQueueDst = NULL;
mVendorTagOps.get_camera_vendor_section_name =
EmulatedCamera2::get_camera_vendor_section_name;
@@ -109,6 +83,7 @@
***************************************************************************/
status_t EmulatedCamera2::connectCamera(hw_device_t** device) {
+ *device = &common;
return NO_ERROR;
}
@@ -117,126 +92,85 @@
}
status_t EmulatedCamera2::getCameraInfo(struct camera_info* info) {
-
return EmulatedBaseCamera::getCameraInfo(info);
}
/****************************************************************************
- * Camera API implementation.
+ * Camera Device API implementation.
* These methods are called from the camera API callback routines.
***************************************************************************/
/** Request input queue */
-int EmulatedCamera2::setRequestQueueSrcOps(
- camera2_metadata_queue_src_ops *request_queue_src_ops) {
- return NO_ERROR;
-}
-
-int EmulatedCamera2::requestQueueNotifyNotEmpty() {
- return NO_ERROR;
-}
-
-/** Reprocessing input queue */
-
-int EmulatedCamera2::setReprocessQueueSrcOps(
- camera2_metadata_queue_src_ops *reprocess_queue_src_ops) {
- return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessQueueNotifyNotEmpty() {
- return NO_ERROR;
-}
-
-/** Frame output queue */
-
-int EmulatedCamera2::setFrameQueueDstOps(camera2_metadata_queue_dst_ops *frame_queue_dst_ops) {
- return NO_ERROR;
-}
-
-int EmulatedCamera2::frameQueueBufferCount() {
- return NO_ERROR;
-}
-int EmulatedCamera2::frameQueueDequeue(camera_metadata_t **buffer) {
- return NO_ERROR;
-}
-int EmulatedCamera2::frameQueueFree(camera_metadata_t *old_buffer) {
- return NO_ERROR;
-}
-
-/** Notifications to application */
-int EmulatedCamera2::setNotifyCallback(camera2_notify_callback notify_cb) {
- return NO_ERROR;
+int EmulatedCamera2::requestQueueNotify() {
+ return INVALID_OPERATION;
}
/** Count of requests in flight */
int EmulatedCamera2::getInProgressCount() {
- return NO_ERROR;
+ return INVALID_OPERATION;
}
/** Cancel all captures in flight */
int EmulatedCamera2::flushCapturesInProgress() {
- return NO_ERROR;
+ return INVALID_OPERATION;
}
-/** Reprocessing input stream management */
-int EmulatedCamera2::reprocessStreamDequeueBuffer(buffer_handle_t** buffer,
- int *stride) {
- return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamEnqueueBuffer(buffer_handle_t* buffer) {
- return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamCancelBuffer(buffer_handle_t* buffer) {
- return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamSetBufferCount(int count) {
- return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamSetCrop(int left, int top, int right, int bottom) {
- return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamSetTimestamp(int64_t timestamp) {
- return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamSetUsage(int usage) {
- return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamSetSwapInterval(int interval) {
- return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamGetMinUndequeuedBufferCount(int *count) {
- return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamLockBuffer(buffer_handle_t *buffer) {
- return NO_ERROR;
+/** Construct a default request for a given use case */
+int EmulatedCamera2::constructDefaultRequest(
+ int request_template,
+ camera_metadata_t **request) {
+ return INVALID_OPERATION;
}
/** Output stream creation and management */
-int EmulatedCamera2::getStreamSlotCount() {
- return NO_ERROR;
-}
-
-int EmulatedCamera2::allocateStream(uint32_t stream_slot,
+int EmulatedCamera2::allocateStream(
uint32_t width,
uint32_t height,
int format,
- camera2_stream_ops_t *stream_ops) {
- return NO_ERROR;
+ camera2_stream_ops_t *stream_ops,
+ uint32_t *stream_id,
+ uint32_t *format_actual,
+ uint32_t *usage,
+ uint32_t *max_buffers) {
+ return INVALID_OPERATION;
}
-int EmulatedCamera2::releaseStream(uint32_t stream_slot) {
- return NO_ERROR;
+int EmulatedCamera2::registerStreamBuffers(
+ uint32_t stream_id,
+ int num_buffers,
+ buffer_handle_t *buffers) {
+ return INVALID_OPERATION;
+}
+
+
+int EmulatedCamera2::releaseStream(uint32_t stream_id) {
+ return INVALID_OPERATION;
+}
+
+/** Reprocessing input stream management */
+
+int EmulatedCamera2::allocateReprocessStream(
+ uint32_t width,
+ uint32_t height,
+ uint32_t format,
+ camera2_stream_in_ops_t *reprocess_stream_ops,
+ uint32_t *stream_id,
+ uint32_t *consumer_usage,
+ uint32_t *max_buffers) {
+ return INVALID_OPERATION;
+}
+
+int EmulatedCamera2::releaseReprocessStream(uint32_t stream_id) {
+ return INVALID_OPERATION;
+}
+
+/** 3A triggering */
+
+int EmulatedCamera2::triggerAction(uint32_t trigger_id,
+ int ext1, int ext2) {
+ return INVALID_OPERATION;
}
/** Custom tag query methods */
@@ -253,14 +187,10 @@
return -1;
}
-/** Shutdown and debug methods */
-
-int EmulatedCamera2::release() {
- return NO_ERROR;
-}
+/** Debug methods */
int EmulatedCamera2::dump(int fd) {
- return NO_ERROR;
+ return INVALID_OPERATION;
}
/****************************************************************************
@@ -272,86 +202,28 @@
* hardware/libhardware/include/hardware/camera2.h for information on each
* of these callbacks. Implemented in this class, these callbacks simply
* dispatch the call into an instance of EmulatedCamera2 class defined by the
- * 'camera_device2' parameter.
+ * 'camera_device2' parameter, or set a member value in the same.
***************************************************************************/
int EmulatedCamera2::set_request_queue_src_ops(struct camera2_device *d,
- camera2_metadata_queue_src_ops *queue_src_ops) {
+ camera2_request_queue_src_ops *queue_src_ops) {
EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
- return ec->setRequestQueueSrcOps(queue_src_ops);
-}
-
-int EmulatedCamera2::get_request_queue_dst_ops(struct camera2_device *d,
- camera2_metadata_queue_dst_ops **queue_dst_ops) {
- EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
- *queue_dst_ops = static_cast<camera2_metadata_queue_dst_ops*>(
- &ec->mRequestQueueDstOps);
+ ec->mRequestQueueSrc = queue_src_ops;
return NO_ERROR;
}
-int EmulatedCamera2::request_queue_notify_queue_not_empty(
- camera2_metadata_queue_dst_ops *q) {
- EmulatedCamera2* ec = static_cast<QueueDstOps*>(q)->parent;
- return ec->requestQueueNotifyNotEmpty();
-}
-
-int EmulatedCamera2::set_reprocess_queue_src_ops(struct camera2_device *d,
- camera2_metadata_queue_src_ops *queue_src_ops) {
+int EmulatedCamera2::notify_request_queue_not_empty(struct camera2_device *d) {
EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
- return ec->setReprocessQueueSrcOps(queue_src_ops);
-}
-
-int EmulatedCamera2::get_reprocess_queue_dst_ops(struct camera2_device *d,
- camera2_metadata_queue_dst_ops **queue_dst_ops) {
- EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
- *queue_dst_ops = static_cast<camera2_metadata_queue_dst_ops*>(
- &ec->mReprocessQueueDstOps);
- return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocess_queue_notify_queue_not_empty(
- camera2_metadata_queue_dst_ops *q) {
- EmulatedCamera2* ec = static_cast<QueueDstOps*>(q)->parent;
- return ec->reprocessQueueNotifyNotEmpty();
+ return ec->requestQueueNotify();
}
int EmulatedCamera2::set_frame_queue_dst_ops(struct camera2_device *d,
- camera2_metadata_queue_dst_ops *queue_dst_ops) {
+ camera2_frame_queue_dst_ops *queue_dst_ops) {
EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
- return ec->setFrameQueueDstOps(queue_dst_ops);
-}
-
-int EmulatedCamera2::get_frame_queue_src_ops(struct camera2_device *d,
- camera2_metadata_queue_src_ops **queue_src_ops) {
- EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
- *queue_src_ops = static_cast<camera2_metadata_queue_src_ops*>(
- &ec->mFrameQueueSrcOps);
+ ec->mFrameQueueDst = queue_dst_ops;
return NO_ERROR;
}
-int EmulatedCamera2::frame_queue_buffer_count(camera2_metadata_queue_src_ops *q) {
- EmulatedCamera2 *ec = static_cast<QueueSrcOps*>(q)->parent;
- return ec->frameQueueBufferCount();
-}
-
-int EmulatedCamera2::frame_queue_dequeue(camera2_metadata_queue_src_ops *q,
- camera_metadata_t **buffer) {
- EmulatedCamera2 *ec = static_cast<QueueSrcOps*>(q)->parent;
- return ec->frameQueueDequeue(buffer);
-}
-
-int EmulatedCamera2::frame_queue_free(camera2_metadata_queue_src_ops *q,
- camera_metadata_t *old_buffer) {
- EmulatedCamera2 *ec = static_cast<QueueSrcOps*>(q)->parent;
- return ec->frameQueueFree(old_buffer);
-}
-
-int EmulatedCamera2::set_notify_callback(struct camera2_device *d,
- camera2_notify_callback notify_cb) {
- EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
- return ec->setNotifyCallback(notify_cb);
-}
-
int EmulatedCamera2::get_in_progress_count(struct camera2_device *d) {
EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
return ec->getInProgressCount();
@@ -362,119 +234,80 @@
return ec->flushCapturesInProgress();
}
-int EmulatedCamera2::get_reprocess_stream_ops(camera2_device_t *d,
- camera2_stream_ops **stream) {
+int EmulatedCamera2::construct_default_request(struct camera2_device *d,
+ int request_template,
+ camera_metadata_t **request) {
EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
- *stream = static_cast<camera2_stream_ops*>(&ec->mReprocessStreamOps);
- return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocess_stream_dequeue_buffer(camera2_stream_ops *s,
- buffer_handle_t** buffer, int *stride) {
- EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
- return ec->reprocessStreamDequeueBuffer(buffer, stride);
-}
-
-int EmulatedCamera2::reprocess_stream_enqueue_buffer(camera2_stream_ops *s,
- buffer_handle_t* buffer) {
- EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
- return ec->reprocessStreamEnqueueBuffer(buffer);
-}
-
-int EmulatedCamera2::reprocess_stream_cancel_buffer(camera2_stream_ops *s,
- buffer_handle_t* buffer) {
- EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
- return ec->reprocessStreamCancelBuffer(buffer);
-}
-
-int EmulatedCamera2::reprocess_stream_set_buffer_count(camera2_stream_ops *s,
- int count) {
- EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
- return ec->reprocessStreamSetBufferCount(count);
-}
-
-int EmulatedCamera2::reprocess_stream_set_crop(camera2_stream_ops *s,
- int left, int top, int right, int bottom) {
- EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
- return ec->reprocessStreamSetCrop(left, top, right, bottom);
-}
-
-int EmulatedCamera2::reprocess_stream_set_timestamp(camera2_stream_ops *s,
- int64_t timestamp) {
- EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
- return ec->reprocessStreamSetTimestamp(timestamp);
-}
-
-int EmulatedCamera2::reprocess_stream_set_usage(camera2_stream_ops *s,
- int usage) {
- EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
- return ec->reprocessStreamSetUsage(usage);
-}
-
-int EmulatedCamera2::reprocess_stream_set_swap_interval(camera2_stream_ops *s,
- int interval) {
- EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
- return ec->reprocessStreamSetSwapInterval(interval);
-}
-
-int EmulatedCamera2::reprocess_stream_get_min_undequeued_buffer_count(
- const camera2_stream_ops *s,
- int *count) {
- EmulatedCamera2* ec = static_cast<const StreamOps*>(s)->parent;
- return ec->reprocessStreamGetMinUndequeuedBufferCount(count);
-}
-
-int EmulatedCamera2::reprocess_stream_lock_buffer(camera2_stream_ops *s,
- buffer_handle_t* buffer) {
- EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
- return ec->reprocessStreamLockBuffer(buffer);
-}
-
-int EmulatedCamera2::get_stream_slot_count(struct camera2_device *d) {
- EmulatedCamera2* ec =
- static_cast<EmulatedCamera2*>(d);
- return ec->getStreamSlotCount();
+ return ec->constructDefaultRequest(request_template, request);
}
int EmulatedCamera2::allocate_stream(struct camera2_device *d,
- uint32_t stream_slot,
+ uint32_t width,
+ uint32_t height,
+ int format,
+ camera2_stream_ops_t *stream_ops,
+ uint32_t *stream_id,
+ uint32_t *format_actual,
+ uint32_t *usage,
+ uint32_t *max_buffers) {
+ EmulatedCamera2* ec =
+ static_cast<EmulatedCamera2*>(d);
+ return ec->allocateStream(width, height, format, stream_ops,
+ stream_id, format_actual, usage, max_buffers);
+}
+
+int EmulatedCamera2::register_stream_buffers(struct camera2_device *d,
+ uint32_t stream_id,
+ int num_buffers,
+ buffer_handle_t *buffers) {
+ EmulatedCamera2* ec =
+ static_cast<EmulatedCamera2*>(d);
+ return ec->registerStreamBuffers(stream_id,
+ num_buffers,
+ buffers);
+}
+int EmulatedCamera2::release_stream(struct camera2_device *d,
+ uint32_t stream_id) {
+ EmulatedCamera2* ec =
+ static_cast<EmulatedCamera2*>(d);
+ return ec->releaseStream(stream_id);
+}
+
+int EmulatedCamera2::allocate_reprocess_stream(struct camera2_device *d,
uint32_t width,
uint32_t height,
uint32_t format,
- camera2_stream_ops_t *stream_ops) {
+ camera2_stream_in_ops_t *reprocess_stream_ops,
+ uint32_t *stream_id,
+ uint32_t *consumer_usage,
+ uint32_t *max_buffers) {
EmulatedCamera2* ec =
static_cast<EmulatedCamera2*>(d);
- return ec->allocateStream(stream_slot, width, height, format, stream_ops);
+ return ec->allocateReprocessStream(width, height, format,
+ reprocess_stream_ops, stream_id, consumer_usage, max_buffers);
}
-int EmulatedCamera2::release_stream(struct camera2_device *d,
- uint32_t stream_slot) {
+int EmulatedCamera2::release_reprocess_stream(struct camera2_device *d,
+ uint32_t stream_id) {
EmulatedCamera2* ec =
static_cast<EmulatedCamera2*>(d);
- return ec->releaseStream(stream_slot);
+ return ec->releaseReprocessStream(stream_id);
}
-void EmulatedCamera2::release(struct camera2_device *d) {
- EmulatedCamera2* ec =
- static_cast<EmulatedCamera2*>(d);
- ec->release();
+int EmulatedCamera2::trigger_action(camera2_device_t *d,
+ uint32_t trigger_id,
+ int ext1,
+ int ext2) {
+ EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
+ return ec->triggerAction(trigger_id, ext1, ext2);
}
-int EmulatedCamera2::dump(struct camera2_device *d, int fd) {
- EmulatedCamera2* ec =
- static_cast<EmulatedCamera2*>(d);
- return ec->dump(fd);
-}
-
-int EmulatedCamera2::close(struct hw_device_t* device) {
- EmulatedCamera2* ec =
- static_cast<EmulatedCamera2*>(
- reinterpret_cast<struct camera2_device*>(device) );
- if (ec == NULL) {
- ALOGE("%s: Unexpected NULL camera2 device", __FUNCTION__);
- return -EINVAL;
- }
- return ec->closeCamera();
+int EmulatedCamera2::set_notify_callback(struct camera2_device *d,
+ camera2_notify_callback notify_cb, void* user) {
+ EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
+ ec->mNotifyCb = notify_cb;
+ ec->mNotifyUserPtr = user;
+ return NO_ERROR;
}
int EmulatedCamera2::get_metadata_vendor_tag_ops(struct camera2_device *d,
@@ -506,22 +339,38 @@
return ec->getVendorTagType(tag);
}
+int EmulatedCamera2::dump(struct camera2_device *d, int fd) {
+ EmulatedCamera2* ec =
+ static_cast<EmulatedCamera2*>(d);
+ return ec->dump(fd);
+}
+
+int EmulatedCamera2::close(struct hw_device_t* device) {
+ EmulatedCamera2* ec =
+ static_cast<EmulatedCamera2*>(
+ reinterpret_cast<struct camera2_device*>(device) );
+ if (ec == NULL) {
+ ALOGE("%s: Unexpected NULL camera2 device", __FUNCTION__);
+ return -EINVAL;
+ }
+ return ec->closeCamera();
+}
+
camera2_device_ops_t EmulatedCamera2::sDeviceOps = {
EmulatedCamera2::set_request_queue_src_ops,
- EmulatedCamera2::get_request_queue_dst_ops,
- EmulatedCamera2::set_reprocess_queue_src_ops,
- EmulatedCamera2::get_reprocess_queue_dst_ops,
+ EmulatedCamera2::notify_request_queue_not_empty,
EmulatedCamera2::set_frame_queue_dst_ops,
- EmulatedCamera2::get_frame_queue_src_ops,
- EmulatedCamera2::set_notify_callback,
EmulatedCamera2::get_in_progress_count,
EmulatedCamera2::flush_captures_in_progress,
- EmulatedCamera2::get_reprocess_stream_ops,
- EmulatedCamera2::get_stream_slot_count,
+ EmulatedCamera2::construct_default_request,
EmulatedCamera2::allocate_stream,
+ EmulatedCamera2::register_stream_buffers,
EmulatedCamera2::release_stream,
+ EmulatedCamera2::allocate_reprocess_stream,
+ EmulatedCamera2::release_reprocess_stream,
+ EmulatedCamera2::trigger_action,
+ EmulatedCamera2::set_notify_callback,
EmulatedCamera2::get_metadata_vendor_tag_ops,
- EmulatedCamera2::release,
EmulatedCamera2::dump
};
diff --git a/tools/emulator/system/camera/EmulatedCamera2.h b/tools/emulator/system/camera/EmulatedCamera2.h
index feeadf9..17082b7 100644
--- a/tools/emulator/system/camera/EmulatedCamera2.h
+++ b/tools/emulator/system/camera/EmulatedCamera2.h
@@ -67,7 +67,7 @@
virtual status_t Initialize();
/****************************************************************************
- * Camera API implementation
+ * Camera module API and generic hardware device API implementation
***************************************************************************/
public:
@@ -75,7 +75,7 @@
virtual status_t closeCamera();
- virtual status_t getCameraInfo(struct camera_info* info);
+ virtual status_t getCameraInfo(struct camera_info* info) = 0;
/****************************************************************************
* Camera API implementation.
@@ -83,176 +83,133 @@
***************************************************************************/
protected:
- /** Request input queue */
-
- int setRequestQueueSrcOps(
- camera2_metadata_queue_src_ops *request_queue_src_ops);
-
- int requestQueueNotifyNotEmpty();
-
- /** Reprocessing input queue */
-
- int setReprocessQueueSrcOps(
- camera2_metadata_queue_src_ops *reprocess_queue_src_ops);
-
- int reprocessQueueNotifyNotEmpty();
-
- /** Frame output queue */
-
- int setFrameQueueDstOps(camera2_metadata_queue_dst_ops *frame_queue_dst_ops);
-
- int frameQueueBufferCount();
- int frameQueueDequeue(camera_metadata_t **buffer);
- int frameQueueFree(camera_metadata_t *old_buffer);
-
- /** Notifications to application */
- int setNotifyCallback(camera2_notify_callback notify_cb);
+ /** Request input queue notification */
+ virtual int requestQueueNotify();
/** Count of requests in flight */
- int getInProgressCount();
+ virtual int getInProgressCount();
/** Cancel all captures in flight */
- int flushCapturesInProgress();
+ virtual int flushCapturesInProgress();
- /** Reprocessing input stream management */
- int reprocessStreamDequeueBuffer(buffer_handle_t** buffer,
- int *stride);
-
- int reprocessStreamEnqueueBuffer(buffer_handle_t* buffer);
-
- int reprocessStreamCancelBuffer(buffer_handle_t* buffer);
-
- int reprocessStreamSetBufferCount(int count);
-
- int reprocessStreamSetCrop(int left, int top, int right, int bottom);
-
- int reprocessStreamSetTimestamp(int64_t timestamp);
-
- int reprocessStreamSetUsage(int usage);
-
- int reprocessStreamSetSwapInterval(int interval);
-
- int reprocessStreamGetMinUndequeuedBufferCount(int *count);
-
- int reprocessStreamLockBuffer(buffer_handle_t *buffer);
+ virtual int constructDefaultRequest(
+ int request_template,
+ camera_metadata_t **request);
/** Output stream creation and management */
-
- int getStreamSlotCount();
-
- int allocateStream(uint32_t stream_slot,
+ virtual int allocateStream(
uint32_t width,
uint32_t height,
int format,
- camera2_stream_ops_t *stream_ops);
+ camera2_stream_ops_t *stream_ops,
+ uint32_t *stream_id,
+ uint32_t *format_actual,
+ uint32_t *usage,
+ uint32_t *max_buffers);
- int releaseStream(uint32_t stream_slot);
+ virtual int registerStreamBuffers(
+ uint32_t stream_id,
+ int num_buffers,
+ buffer_handle_t *buffers);
+
+ virtual int releaseStream(uint32_t stream_id);
+
+ /** Input stream creation and management */
+ virtual int allocateReprocessStream(
+ uint32_t width,
+ uint32_t height,
+ uint32_t format,
+ camera2_stream_in_ops_t *reprocess_stream_ops,
+ uint32_t *stream_id,
+ uint32_t *consumer_usage,
+ uint32_t *max_buffers);
+
+ virtual int releaseReprocessStream(uint32_t stream_id);
+
+ /** 3A action triggering */
+ virtual int triggerAction(uint32_t trigger_id,
+ int ext1, int ext2);
/** Custom tag definitions */
- const char* getVendorSectionName(uint32_t tag);
- const char* getVendorTagName(uint32_t tag);
- int getVendorTagType(uint32_t tag);
+ virtual const char* getVendorSectionName(uint32_t tag);
+ virtual const char* getVendorTagName(uint32_t tag);
+ virtual int getVendorTagType(uint32_t tag);
- /** Shutdown and debug methods */
+ /** Debug methods */
- int release();
-
- int dump(int fd);
-
- int close();
+ virtual int dump(int fd);
/****************************************************************************
* Camera API callbacks as defined by camera2_device_ops structure. See
* hardware/libhardware/include/hardware/camera2.h for information on each
* of these callbacks. Implemented in this class, these callbacks simply
- * dispatch the call into an instance of EmulatedCamera2 class defined in the
- * 'camera_device2' parameter.
+ * dispatch the call into an instance of EmulatedCamera2 class defined in
+ * the 'camera_device2' parameter.
***************************************************************************/
private:
/** Input request queue */
static int set_request_queue_src_ops(camera2_device_t *,
- camera2_metadata_queue_src_ops *queue_src_ops);
- static int get_request_queue_dst_ops(camera2_device_t *,
- camera2_metadata_queue_dst_ops **queue_dst_ops);
- // for get_request_queue_dst_ops
- static int request_queue_notify_queue_not_empty(
- camera2_metadata_queue_dst_ops *);
-
- /** Input reprocess queue */
- static int set_reprocess_queue_src_ops(camera2_device_t *,
- camera2_metadata_queue_src_ops *reprocess_queue_src_ops);
- static int get_reprocess_queue_dst_ops(camera2_device_t *,
- camera2_metadata_queue_dst_ops **queue_dst_ops);
- // for reprocess_queue_dst_ops
- static int reprocess_queue_notify_queue_not_empty(
- camera2_metadata_queue_dst_ops *);
+ camera2_request_queue_src_ops *queue_src_ops);
+ static int notify_request_queue_not_empty(camera2_device_t *);
/** Output frame queue */
static int set_frame_queue_dst_ops(camera2_device_t *,
- camera2_metadata_queue_dst_ops *queue_dst_ops);
- static int get_frame_queue_src_ops(camera2_device_t *,
- camera2_metadata_queue_src_ops **queue_src_ops);
- // for get_frame_queue_src_ops
- static int frame_queue_buffer_count(camera2_metadata_queue_src_ops *);
- static int frame_queue_dequeue(camera2_metadata_queue_src_ops *,
- camera_metadata_t **buffer);
- static int frame_queue_free(camera2_metadata_queue_src_ops *,
- camera_metadata_t *old_buffer);
-
- /** Notifications to application */
- static int set_notify_callback(camera2_device_t *,
- camera2_notify_callback notify_cb);
+ camera2_frame_queue_dst_ops *queue_dst_ops);
/** In-progress request management */
static int get_in_progress_count(camera2_device_t *);
static int flush_captures_in_progress(camera2_device_t *);
- /** Input reprocessing stream */
- static int get_reprocess_stream_ops(camera2_device_t *,
- camera2_stream_ops_t **stream);
- // for get_reprocess_stream_ops
- static int reprocess_stream_dequeue_buffer(camera2_stream_ops *,
- buffer_handle_t** buffer, int *stride);
- static int reprocess_stream_enqueue_buffer(camera2_stream_ops *,
- buffer_handle_t* buffer);
- static int reprocess_stream_cancel_buffer(camera2_stream_ops *,
- buffer_handle_t* buffer);
- static int reprocess_stream_set_buffer_count(camera2_stream_ops *,
- int count);
- static int reprocess_stream_set_crop(camera2_stream_ops *,
- int left, int top, int right, int bottom);
- static int reprocess_stream_set_timestamp(camera2_stream_ops *,
- int64_t timestamp);
- static int reprocess_stream_set_usage(camera2_stream_ops *,
- int usage);
- static int reprocess_stream_set_swap_interval(camera2_stream_ops *,
- int interval);
- static int reprocess_stream_get_min_undequeued_buffer_count(
- const camera2_stream_ops *,
- int *count);
- static int reprocess_stream_lock_buffer(camera2_stream_ops *,
- buffer_handle_t* buffer);
+ /** Request template creation */
+ static int construct_default_request(camera2_device_t *,
+ int request_template,
+ camera_metadata_t **request);
- /** Output stream allocation and management */
-
- static int get_stream_slot_count(camera2_device_t *);
-
+ /** Stream management */
static int allocate_stream(camera2_device_t *,
- uint32_t stream_slot,
+ uint32_t width,
+ uint32_t height,
+ int format,
+ camera2_stream_ops_t *stream_ops,
+ uint32_t *stream_id,
+ uint32_t *format_actual,
+ uint32_t *usage,
+ uint32_t *max_buffers);
+
+ static int register_stream_buffers(camera2_device_t *,
+ uint32_t stream_id,
+ int num_buffers,
+ buffer_handle_t *buffers);
+
+ static int release_stream(camera2_device_t *,
+ uint32_t stream_id);
+
+ static int allocate_reprocess_stream(camera2_device_t *,
uint32_t width,
uint32_t height,
uint32_t format,
- camera2_stream_ops_t *stream_ops);
+ camera2_stream_in_ops_t *reprocess_stream_ops,
+ uint32_t *stream_id,
+ uint32_t *consumer_usage,
+ uint32_t *max_buffers);
- static int release_stream(camera2_device_t *,
- uint32_t stream_slot);
+ static int release_reprocess_stream(camera2_device_t *,
+ uint32_t stream_id);
- static void release(camera2_device_t *);
+ /** 3A triggers*/
+ static int trigger_action(camera2_device_t *,
+ uint32_t trigger_id,
+ int ext1,
+ int ext2);
+
+ /** Notifications to application */
+ static int set_notify_callback(camera2_device_t *,
+ camera2_notify_callback notify_cb,
+ void *user);
/** Vendor metadata registration */
-
static int get_metadata_vendor_tag_ops(camera2_device_t *,
vendor_tag_query_ops_t **ops);
// for get_metadata_vendor_tag_ops
@@ -268,36 +225,28 @@
static int dump(camera2_device_t *, int fd);
+ /** For hw_device_t ops */
static int close(struct hw_device_t* device);
/****************************************************************************
- * Data members
+ * Data members shared with implementations
***************************************************************************/
-
- private:
- static camera2_device_ops_t sDeviceOps;
-
- struct QueueDstOps : public camera2_metadata_queue_dst_ops {
- EmulatedCamera2 *parent;
- };
-
- struct QueueSrcOps : public camera2_metadata_queue_src_ops {
- EmulatedCamera2 *parent;
- };
-
- struct StreamOps : public camera2_stream_ops {
- EmulatedCamera2 *parent;
- };
+ protected:
+ camera2_request_queue_src_ops *mRequestQueueSrc;
+ camera2_frame_queue_dst_ops *mFrameQueueDst;
+ camera2_notify_callback mNotifyCb;
+ void* mNotifyUserPtr;
struct TagOps : public vendor_tag_query_ops {
EmulatedCamera2 *parent;
};
-
- QueueDstOps mRequestQueueDstOps;
- QueueDstOps mReprocessQueueDstOps;
- QueueSrcOps mFrameQueueSrcOps;
- StreamOps mReprocessStreamOps;
TagOps mVendorTagOps;
+
+ /****************************************************************************
+ * Data members
+ ***************************************************************************/
+ private:
+ static camera2_device_ops_t sDeviceOps;
};
}; /* namespace android */
diff --git a/tools/emulator/system/camera/EmulatedCameraFactory.cpp b/tools/emulator/system/camera/EmulatedCameraFactory.cpp
index 2960751..48d1d99 100755
--- a/tools/emulator/system/camera/EmulatedCameraFactory.cpp
+++ b/tools/emulator/system/camera/EmulatedCameraFactory.cpp
@@ -75,11 +75,13 @@
switch (getBackCameraHalVersion()) {
case 1:
mEmulatedCameras[camera_id] =
- new EmulatedFakeCamera(camera_id, false, &HAL_MODULE_INFO_SYM.common);
+ new EmulatedFakeCamera(camera_id, true,
+ &HAL_MODULE_INFO_SYM.common);
break;
case 2:
mEmulatedCameras[camera_id] =
- new EmulatedFakeCamera2(camera_id, false, &HAL_MODULE_INFO_SYM.common);
+ new EmulatedFakeCamera2(camera_id, true,
+ &HAL_MODULE_INFO_SYM.common);
break;
default:
ALOGE("%s: Unknown back camera hal version requested: %d", __FUNCTION__,
@@ -90,10 +92,10 @@
getBackCameraHalVersion());
if (mEmulatedCameras[camera_id]->Initialize() != NO_ERROR) {
delete mEmulatedCameras[camera_id];
- mEmulatedCameras--;
+ mEmulatedCameraNum--;
}
} else {
- mEmulatedCameras--;
+ mEmulatedCameraNum--;
ALOGE("%s: Unable to instantiate fake camera class", __FUNCTION__);
}
}
@@ -121,14 +123,17 @@
switch (getFrontCameraHalVersion()) {
case 1:
mEmulatedCameras[camera_id] =
- new EmulatedFakeCamera(camera_id, false, &HAL_MODULE_INFO_SYM.common);
+ new EmulatedFakeCamera(camera_id, false,
+ &HAL_MODULE_INFO_SYM.common);
break;
case 2:
mEmulatedCameras[camera_id] =
- new EmulatedFakeCamera2(camera_id, false, &HAL_MODULE_INFO_SYM.common);
+ new EmulatedFakeCamera2(camera_id, false,
+ &HAL_MODULE_INFO_SYM.common);
break;
default:
- ALOGE("%s: Unknown front camera hal version requested: %d", __FUNCTION__,
+ ALOGE("%s: Unknown front camera hal version requested: %d",
+ __FUNCTION__,
getFrontCameraHalVersion());
}
if (mEmulatedCameras[camera_id] != NULL) {
@@ -136,10 +141,10 @@
getFrontCameraHalVersion());
if (mEmulatedCameras[camera_id]->Initialize() != NO_ERROR) {
delete mEmulatedCameras[camera_id];
- mEmulatedCameras--;
+ mEmulatedCameraNum--;
}
} else {
- mEmulatedCameras--;
+ mEmulatedCameraNum--;
ALOGE("%s: Unable to instantiate fake camera class", __FUNCTION__);
}
}
diff --git a/tools/emulator/system/camera/EmulatedFakeCamera2.cpp b/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
index aa62244..5291deb 100644
--- a/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
+++ b/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
@@ -21,13 +21,32 @@
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_FakeCamera2"
-#include <cutils/log.h>
-#include <cutils/properties.h>
+#include <utils/Log.h>
+
#include "EmulatedFakeCamera2.h"
#include "EmulatedCameraFactory.h"
+#include <ui/Rect.h>
+#include <ui/GraphicBufferMapper.h>
namespace android {
+const uint32_t EmulatedFakeCamera2::kAvailableFormats[1] = {
+ HAL_PIXEL_FORMAT_RAW_SENSOR
+};
+
+const uint32_t EmulatedFakeCamera2::kAvailableSizesPerFormat[1] = {
+ 1
+};
+
+const uint32_t EmulatedFakeCamera2::kAvailableSizes[2] = {
+ 640, 480
+ // Sensor::kResolution[0], Sensor::kResolution[1]
+};
+
+const uint64_t EmulatedFakeCamera2::kAvailableMinFrameDurations[1] = {
+ Sensor::kFrameDurationRange[0]
+};
+
EmulatedFakeCamera2::EmulatedFakeCamera2(int cameraId,
bool facingBack,
struct hw_module_t* module)
@@ -38,17 +57,711 @@
facingBack ? "back" : "front");
}
-EmulatedFakeCamera2::~EmulatedFakeCamera2()
-{
+EmulatedFakeCamera2::~EmulatedFakeCamera2() {
+ if (mCameraInfo != NULL) {
+ free_camera_metadata(mCameraInfo);
+ }
}
/****************************************************************************
* Public API overrides
***************************************************************************/
-status_t EmulatedFakeCamera2::Initialize()
-{
+status_t EmulatedFakeCamera2::Initialize() {
+ status_t res;
+ mCameraInfo = allocate_camera_metadata(10,100);
+
+ res = add_camera_metadata_entry(mCameraInfo,
+ ANDROID_SENSOR_EXPOSURE_TIME_RANGE,
+ Sensor::kExposureTimeRange, 2);
+
+ res = add_camera_metadata_entry(mCameraInfo,
+ ANDROID_SENSOR_MAX_FRAME_DURATION,
+ &Sensor::kFrameDurationRange[1], 1);
+
+ res = add_camera_metadata_entry(mCameraInfo,
+ ANDROID_SENSOR_AVAILABLE_SENSITIVITIES,
+ Sensor::kAvailableSensitivities,
+ sizeof(Sensor::kAvailableSensitivities)
+ /sizeof(uint32_t));
+
+ res = add_camera_metadata_entry(mCameraInfo,
+ ANDROID_SENSOR_COLOR_FILTER_ARRANGEMENT,
+ &Sensor::kColorFilterArrangement, 1);
+
+ res = add_camera_metadata_entry(mCameraInfo,
+ ANDROID_SENSOR_PIXEL_ARRAY_SIZE,
+ Sensor::kResolution, 2);
+
+ res = add_camera_metadata_entry(mCameraInfo,
+ ANDROID_SENSOR_ACTIVE_ARRAY_SIZE,
+ Sensor::kResolution, 2);
+
+ res = add_camera_metadata_entry(mCameraInfo,
+ ANDROID_SCALER_AVAILABLE_FORMATS,
+ kAvailableFormats,
+ sizeof(kAvailableFormats)/sizeof(uint32_t));
+
+ res = add_camera_metadata_entry(mCameraInfo,
+ ANDROID_SCALER_AVAILABLE_SIZES_PER_FORMAT,
+ kAvailableSizesPerFormat,
+ sizeof(kAvailableSizesPerFormat)/sizeof(uint32_t));
+
+ res = add_camera_metadata_entry(mCameraInfo,
+ ANDROID_SCALER_AVAILABLE_SIZES,
+ kAvailableSizes,
+ sizeof(kAvailableSizes)/sizeof(uint32_t));
+
+ res = add_camera_metadata_entry(mCameraInfo,
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+ kAvailableMinFrameDurations,
+                                    sizeof(kAvailableMinFrameDurations)/sizeof(uint64_t));
+
+ // TODO: Add all the others
+
return NO_ERROR;
}
+/****************************************************************************
+ * Camera module API overrides
+ ***************************************************************************/
+
+status_t EmulatedFakeCamera2::connectCamera(hw_device_t** device) {
+ status_t res;
+ ALOGV("%s", __FUNCTION__);
+
+ mConfigureThread = new ConfigureThread(this);
+ mReadoutThread = new ReadoutThread(this);
+ mSensor = new Sensor();
+
+ mNextStreamId = 0;
+ mRawStreamOps = NULL;
+
+ res = mSensor->startUp();
+ if (res != NO_ERROR) return res;
+
+ res = mConfigureThread->run("EmulatedFakeCamera2::configureThread");
+ if (res != NO_ERROR) return res;
+
+ res = mReadoutThread->run("EmulatedFakeCamera2::readoutThread");
+ if (res != NO_ERROR) return res;
+
+ return EmulatedCamera2::connectCamera(device);
+}
+
+status_t EmulatedFakeCamera2::closeCamera() {
+ Mutex::Autolock l(mMutex);
+
+ status_t res;
+ ALOGV("%s", __FUNCTION__);
+
+ res = mSensor->shutDown();
+ if (res != NO_ERROR) {
+ ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
+ return res;
+ }
+
+ mConfigureThread->requestExit();
+ mReadoutThread->requestExit();
+
+ mConfigureThread->join();
+ mReadoutThread->join();
+
+ ALOGV("%s exit", __FUNCTION__);
+ return NO_ERROR;
+}
+
+status_t EmulatedFakeCamera2::getCameraInfo(struct camera_info *info) {
+ info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
+ info->orientation = 0;
+ return EmulatedCamera2::getCameraInfo(info);
+}
+
+/****************************************************************************
+ * Camera device API overrides
+ ***************************************************************************/
+
+/** Request input queue */
+
+int EmulatedFakeCamera2::requestQueueNotify() {
+ ALOGV("Request queue notification received");
+
+ ALOG_ASSERT(mRequestQueueSrc != NULL,
+ "%s: Request queue src not set, but received queue notification!",
+ __FUNCTION__);
+ ALOG_ASSERT(mFrameQueueDst != NULL,
+            "%s: Frame queue dst not set, but received queue notification!",
+ __FUNCTION__);
+ ALOG_ASSERT(mRawStreamOps != NULL,
+ "%s: No raw stream allocated, but received queue notification!",
+ __FUNCTION__);
+ return mConfigureThread->newRequestAvailable();
+}
+
+int EmulatedFakeCamera2::allocateStream(
+ uint32_t width,
+ uint32_t height,
+ int format,
+ camera2_stream_ops_t *stream_ops,
+ uint32_t *stream_id,
+ uint32_t *format_actual,
+ uint32_t *usage,
+ uint32_t *max_buffers) {
+ Mutex::Autolock l(mMutex);
+
+ if (mNextStreamId > 0) {
+ // TODO: Support more than one stream
+ ALOGW("%s: Only one stream supported", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ unsigned int numFormats = sizeof(kAvailableFormats) / sizeof(uint32_t);
+ unsigned int formatIdx = 0;
+ unsigned int sizeOffsetIdx = 0;
+ for (; formatIdx < numFormats; formatIdx++) {
+ if (format == (int)kAvailableFormats[formatIdx]) break;
+ sizeOffsetIdx += kAvailableSizesPerFormat[formatIdx];
+ }
+
+ if (formatIdx == numFormats) {
+ ALOGW("%s: Format 0x%x is not supported", __FUNCTION__, format);
+ return BAD_VALUE;
+ }
+ unsigned int resIdx = 0;
+
+ for (; resIdx < kAvailableSizesPerFormat[formatIdx]; resIdx++) {
+ uint32_t widthMatch = kAvailableSizes[ (sizeOffsetIdx + resIdx)*2 + 0];
+ uint32_t heightMatch = kAvailableSizes[ (sizeOffsetIdx + resIdx)*2 + 1];
+ if (width == widthMatch && height == heightMatch) break;
+ }
+ if (resIdx == kAvailableSizesPerFormat[formatIdx]) {
+ ALOGW("%s: Format 0x%x does not support resolution %d, %d", __FUNCTION__,
+ format, width, height);
+ return BAD_VALUE;
+ }
+
+ // TODO: Generalize below to work for variable types of streams, etc.
+ // Currently only correct for raw sensor format, sensor resolution.
+
+ ALOG_ASSERT(format == HAL_PIXEL_FORMAT_RAW_SENSOR,
+ "%s: TODO: Only supporting raw sensor format right now", __FUNCTION__);
+ ALOG_ASSERT(width == Sensor::kResolution[0],
+ "%s: TODO: Only supporting raw sensor size right now", __FUNCTION__);
+ ALOG_ASSERT(height == Sensor::kResolution[1],
+ "%s: TODO: Only supporting raw sensor size right now", __FUNCTION__);
+
+ mRawStreamOps = stream_ops;
+
+ *stream_id = mNextStreamId;
+ if (format_actual) *format_actual = format;
+ *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
+ *max_buffers = 4;
+
+ ALOGV("Stream allocated: %d, %d x %d, 0x%x. U: %x, B: %d",
+ *stream_id, width, height, format, *usage, *max_buffers);
+
+ mNextStreamId++;
+ return NO_ERROR;
+}
+
+int EmulatedFakeCamera2::registerStreamBuffers(
+ uint32_t stream_id,
+ int num_buffers,
+ buffer_handle_t *buffers) {
+ // Emulator doesn't need to register these with V4L2, etc.
+ ALOGV("%s: Stream %d registering %d buffers", __FUNCTION__,
+ stream_id, num_buffers);
+ return NO_ERROR;
+}
+
+int EmulatedFakeCamera2::releaseStream(uint32_t stream_id) {
+ Mutex::Autolock l(mMutex);
+ ALOG_ASSERT(stream_id == 0,
+ "%s: TODO: Only one stream supported", __FUNCTION__);
+
+ // TODO: Need to clean up better than this - in-flight buffers likely
+ mRawStreamOps = NULL;
+
+ return NO_ERROR;
+}
+
+/** Custom tag definitions */
+
+// Emulator camera metadata sections
+enum {
+ EMULATOR_SCENE = VENDOR_SECTION,
+ END_EMULATOR_SECTIONS
+};
+
+enum {
+ EMULATOR_SCENE_START = EMULATOR_SCENE << 16,
+};
+
+// Emulator camera metadata tags
+enum {
+ // Hour of day to use for lighting calculations (0-23). Default: 12
+ EMULATOR_SCENE_HOUROFDAY = EMULATOR_SCENE_START,
+ EMULATOR_SCENE_END
+};
+
+unsigned int emulator_metadata_section_bounds[END_EMULATOR_SECTIONS -
+ VENDOR_SECTION][2] = {
+ { EMULATOR_SCENE_START, EMULATOR_SCENE_END }
+};
+
+const char *emulator_metadata_section_names[END_EMULATOR_SECTIONS -
+ VENDOR_SECTION] = {
+ "com.android.emulator.scene"
+};
+
+typedef struct emulator_tag_info {
+ const char *tag_name;
+ uint8_t tag_type;
+} emulator_tag_info_t;
+
+emulator_tag_info_t emulator_scene[EMULATOR_SCENE_END - EMULATOR_SCENE_START] = {
+ { "hourOfDay", TYPE_INT32 }
+};
+
+emulator_tag_info_t *tag_info[END_EMULATOR_SECTIONS -
+ VENDOR_SECTION] = {
+ emulator_scene
+};
+
+const char* EmulatedFakeCamera2::getVendorSectionName(uint32_t tag) {
+ ALOGV("%s", __FUNCTION__);
+ uint32_t section = tag >> 16;
+    if (section < VENDOR_SECTION || section >= END_EMULATOR_SECTIONS) return NULL;
+ return emulator_metadata_section_names[section - VENDOR_SECTION];
+}
+
+const char* EmulatedFakeCamera2::getVendorTagName(uint32_t tag) {
+ ALOGV("%s", __FUNCTION__);
+ uint32_t section = tag >> 16;
+    if (section < VENDOR_SECTION || section >= END_EMULATOR_SECTIONS) return NULL;
+ uint32_t section_index = section - VENDOR_SECTION;
+ if (tag >= emulator_metadata_section_bounds[section_index][1]) {
+ return NULL;
+ }
+ uint32_t tag_index = tag & 0xFFFF;
+ return tag_info[section_index][tag_index].tag_name;
+}
+
+int EmulatedFakeCamera2::getVendorTagType(uint32_t tag) {
+ ALOGV("%s", __FUNCTION__);
+ uint32_t section = tag >> 16;
+    if (section < VENDOR_SECTION || section >= END_EMULATOR_SECTIONS) return -1;
+ uint32_t section_index = section - VENDOR_SECTION;
+ if (tag >= emulator_metadata_section_bounds[section_index][1]) {
+ return -1;
+ }
+ uint32_t tag_index = tag & 0xFFFF;
+ return tag_info[section_index][tag_index].tag_type;
+}
+
+/** Shutdown and debug methods */
+
+int EmulatedFakeCamera2::dump(int fd) {
+ return NO_ERROR;
+}
+
+void EmulatedFakeCamera2::signalError() {
+ // TODO: Let parent know so we can shut down cleanly
+ ALOGE("Worker thread is signaling a serious error");
+}
+
+/** Pipeline control worker thread methods */
+
+EmulatedFakeCamera2::ConfigureThread::ConfigureThread(EmulatedFakeCamera2 *parent):
+ Thread(false),
+ mParent(parent) {
+ mRunning = false;
+}
+
+EmulatedFakeCamera2::ConfigureThread::~ConfigureThread() {
+}
+
+status_t EmulatedFakeCamera2::ConfigureThread::readyToRun() {
+ Mutex::Autolock lock(mInputMutex);
+
+ ALOGV("Starting up ConfigureThread");
+ mRequest = NULL;
+ mActive = false;
+ mRunning = true;
+
+ mInputSignal.signal();
+ return NO_ERROR;
+}
+
+status_t EmulatedFakeCamera2::ConfigureThread::waitUntilRunning() {
+ Mutex::Autolock lock(mInputMutex);
+ if (!mRunning) {
+ ALOGV("Waiting for configure thread to start");
+ mInputSignal.wait(mInputMutex);
+ }
+ return OK;
+}
+
+status_t EmulatedFakeCamera2::ConfigureThread::newRequestAvailable() {
+ waitUntilRunning();
+
+ Mutex::Autolock lock(mInputMutex);
+
+ mActive = true;
+ mInputSignal.signal();
+
+ return OK;
+}
+
+bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
+ static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
+ status_t res;
+
+ // Check if we're currently processing or just waiting
+ {
+ Mutex::Autolock lock(mInputMutex);
+ if (!mActive) {
+ // Inactive, keep waiting until we've been signaled
+ status_t res;
+ res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
+ if (res != NO_ERROR && res != TIMED_OUT) {
+ ALOGE("%s: Error waiting for input requests: %d",
+ __FUNCTION__, res);
+ return false;
+ }
+ if (!mActive) return true;
+ ALOGV("New request available");
+ }
+ // Active
+ }
+ if (mRequest == NULL) {
+ ALOGV("Getting next request");
+ res = mParent->mRequestQueueSrc->dequeue_request(
+ mParent->mRequestQueueSrc,
+ &mRequest);
+ if (res != NO_ERROR) {
+ ALOGE("%s: Error dequeuing next request: %d", __FUNCTION__, res);
+ mParent->signalError();
+ return false;
+ }
+ if (mRequest == NULL) {
+ ALOGV("Request queue empty, going inactive");
+ // No requests available, go into inactive mode
+ Mutex::Autolock lock(mInputMutex);
+ mActive = false;
+ return true;
+ }
+ // Get necessary parameters for sensor config
+
+ sort_camera_metadata(mRequest);
+
+ uint8_t *streams;
+ size_t streamCount;
+ res = find_camera_metadata_entry(mRequest,
+ ANDROID_REQUEST_OUTPUT_STREAMS,
+ NULL,
+ (void**)&streams,
+ &streamCount);
+ if (res != NO_ERROR) {
+ ALOGE("%s: error reading output stream tag", __FUNCTION__);
+ mParent->signalError();
+ return false;
+ }
+ // TODO: Only raw stream supported
+ if (streamCount != 1 || streams[0] != 0) {
+ ALOGE("%s: TODO: Only raw stream supported", __FUNCTION__);
+ mParent->signalError();
+ return false;
+ }
+
+ res = find_camera_metadata_entry(mRequest,
+ ANDROID_REQUEST_FRAME_COUNT,
+ NULL,
+ (void**)&mNextFrameNumber,
+ NULL);
+ if (res != NO_ERROR) {
+ ALOGE("%s: error reading frame count tag", __FUNCTION__);
+ mParent->signalError();
+ return false;
+ }
+
+ res = find_camera_metadata_entry(mRequest,
+ ANDROID_SENSOR_EXPOSURE_TIME,
+ NULL,
+ (void**)&mNextExposureTime,
+ NULL);
+ if (res != NO_ERROR) {
+ ALOGE("%s: error reading exposure time tag", __FUNCTION__);
+ mParent->signalError();
+ return false;
+ }
+
+ res = find_camera_metadata_entry(mRequest,
+ ANDROID_SENSOR_FRAME_DURATION,
+ NULL,
+ (void**)&mNextFrameDuration,
+ NULL);
+ if (res != NO_ERROR) {
+ ALOGE("%s: error reading frame duration tag", __FUNCTION__);
+ mParent->signalError();
+ return false;
+ }
+ if (*mNextFrameDuration <
+ *mNextExposureTime + Sensor::kMinVerticalBlank) {
+ *mNextFrameDuration = *mNextExposureTime + Sensor::kMinVerticalBlank;
+ }
+ res = find_camera_metadata_entry(mRequest,
+ ANDROID_SENSOR_SENSITIVITY,
+ NULL,
+ (void**)&mNextSensitivity,
+ NULL);
+ if (res != NO_ERROR) {
+ ALOGE("%s: error reading sensitivity tag", __FUNCTION__);
+ mParent->signalError();
+ return false;
+ }
+
+ uint32_t *hourOfDay;
+ res = find_camera_metadata_entry(mRequest,
+ EMULATOR_SCENE_HOUROFDAY,
+ NULL,
+ (void**)&hourOfDay,
+ NULL);
+ if (res == NO_ERROR) {
+ ALOGV("Setting hour: %d", *hourOfDay);
+ mParent->mSensor->getScene().setHour(*hourOfDay);
+ }
+
+ // TODO: Fetch stride from gralloc
+ mNextBufferStride = Sensor::kResolution[0];
+
+ // Start waiting on sensor
+ ALOGV("Waiting for sensor");
+ }
+ bool vsync = mParent->mSensor->waitForVSync(kWaitPerLoop);
+
+ if (vsync) {
+ ALOGV("Configuring sensor for frame %d", *mNextFrameNumber);
+ mParent->mSensor->setExposureTime(*mNextExposureTime);
+ mParent->mSensor->setFrameDuration(*mNextFrameDuration);
+ mParent->mSensor->setSensitivity(*mNextSensitivity);
+
+ /** Get buffer to fill for this frame */
+ // TODO: Only does raw stream
+
+ /* Get next buffer from raw stream */
+ mNextBuffer = NULL;
+ res = mParent->mRawStreamOps->dequeue_buffer(mParent->mRawStreamOps,
+ &mNextBuffer);
+ if (res != NO_ERROR || mNextBuffer == NULL) {
+ ALOGE("%s: Unable to dequeue buffer from stream %d: %d",
+ __FUNCTION__, 0, res);
+ mParent->signalError();
+ return false;
+ }
+
+ /* Lock the buffer from the perspective of the graphics mapper */
+ uint8_t *img;
+ const Rect rect(Sensor::kResolution[0], Sensor::kResolution[1]);
+
+ res = GraphicBufferMapper::get().lock(*mNextBuffer,
+ GRALLOC_USAGE_SW_WRITE_OFTEN,
+ rect, (void**)&img);
+
+ if (res != NO_ERROR) {
+ ALOGE("%s: grbuffer_mapper.lock failure: %d", __FUNCTION__, res);
+ mParent->mRawStreamOps->cancel_buffer(mParent->mRawStreamOps,
+ mNextBuffer);
+ mParent->signalError();
+ return false;
+ }
+ mParent->mSensor->setDestinationBuffer(img, mNextBufferStride);
+ mParent->mReadoutThread->setNextCapture(mRequest, mNextBuffer);
+
+ mRequest = NULL;
+ }
+
+ return true;
+}
+
+EmulatedFakeCamera2::ReadoutThread::ReadoutThread(EmulatedFakeCamera2 *parent):
+ Thread(false),
+ mParent(parent),
+ mRunning(false),
+ mActive(false),
+ mRequest(NULL),
+ mBuffer(NULL)
+{
+ mInFlightQueue = new InFlightQueue[kInFlightQueueSize];
+ mInFlightHead = 0;
+ mInFlightTail = 0;
+}
+
+EmulatedFakeCamera2::ReadoutThread::~ReadoutThread() {
+    delete[] mInFlightQueue;
+}
+
+status_t EmulatedFakeCamera2::ReadoutThread::readyToRun() {
+ Mutex::Autolock lock(mInputMutex);
+ ALOGV("Starting up ReadoutThread");
+ mRunning = true;
+ mInputSignal.signal();
+ return NO_ERROR;
+}
+
+status_t EmulatedFakeCamera2::ReadoutThread::waitUntilRunning() {
+ Mutex::Autolock lock(mInputMutex);
+ if (!mRunning) {
+ ALOGV("Waiting for readout thread to start");
+ mInputSignal.wait(mInputMutex);
+ }
+ return OK;
+}
+
+void EmulatedFakeCamera2::ReadoutThread::setNextCapture(camera_metadata_t *request,
+ buffer_handle_t *buffer) {
+ Mutex::Autolock lock(mInputMutex);
+ if ( (mInFlightTail + 1) % kInFlightQueueSize == mInFlightHead) {
+ ALOGE("In flight queue full, dropping captures");
+ mParent->signalError();
+ return;
+ }
+ mInFlightQueue[mInFlightTail].request = request;
+ mInFlightQueue[mInFlightTail].buffer = buffer;
+ mInFlightTail = (mInFlightTail + 1) % kInFlightQueueSize;
+
+ if (!mActive) {
+ mActive = true;
+ mInputSignal.signal();
+ }
+}
+
+bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
+ static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
+ status_t res;
+
+ // Check if we're currently processing or just waiting
+ {
+ Mutex::Autolock lock(mInputMutex);
+ if (!mActive) {
+ // Inactive, keep waiting until we've been signaled
+ res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
+ if (res != NO_ERROR && res != TIMED_OUT) {
+ ALOGE("%s: Error waiting for capture requests: %d",
+ __FUNCTION__, res);
+ mParent->signalError();
+ return false;
+ }
+ if (!mActive) return true;
+ }
+ // Active, see if we need a new request
+ if (mRequest == NULL) {
+ if (mInFlightHead == mInFlightTail) {
+ // Go inactive
+ ALOGV("Waiting for sensor data");
+ mActive = false;
+ return true;
+ } else {
+ mRequest = mInFlightQueue[mInFlightHead].request;
+ mBuffer = mInFlightQueue[mInFlightHead].buffer;
+ mInFlightQueue[mInFlightHead].request = NULL;
+ mInFlightQueue[mInFlightHead].buffer = NULL;
+ mInFlightHead = (mInFlightHead + 1) % kInFlightQueueSize;
+ }
+ }
+ }
+
+ // Active with request, wait on sensor to complete
+
+ nsecs_t captureTime;
+
+ bool gotFrame;
+ gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop,
+ &captureTime);
+
+ if (!gotFrame) return true;
+
+ // Got sensor data, construct frame and send it out
+ ALOGV("Readout: Constructing metadata and frames");
+
+ uint8_t *metadata_mode;
+ res = find_camera_metadata_entry(mRequest,
+ ANDROID_REQUEST_METADATA_MODE,
+ NULL, (void**)&metadata_mode, NULL);
+
+    if (res == NO_ERROR && *metadata_mode == ANDROID_REQUEST_METADATA_FULL) {
+ ALOGV("Metadata requested, constructing");
+
+ camera_metadata_t *frame = NULL;
+
+ size_t frame_entries = get_camera_metadata_entry_count(mRequest);
+ size_t frame_data = get_camera_metadata_data_count(mRequest);
+
+ frame_entries += 2;
+ frame_data += 8;
+
+ res = mParent->mFrameQueueDst->dequeue_frame(mParent->mFrameQueueDst,
+ frame_entries, frame_data, &frame);
+
+ if (res != NO_ERROR || frame == NULL) {
+ ALOGE("%s: Unable to dequeue frame metadata buffer", __FUNCTION__);
+ mParent->signalError();
+ return false;
+ }
+
+ res = append_camera_metadata(frame, mRequest);
+ if (res != NO_ERROR) {
+ ALOGE("Unable to append request metadata");
+ }
+
+ add_camera_metadata_entry(frame,
+ ANDROID_SENSOR_TIMESTAMP,
+ &captureTime,
+ 1);
+
+ uint32_t hourOfDay = (uint32_t)mParent->mSensor->getScene().getHour();
+ uint32_t *requestedHour;
+ res = find_camera_metadata_entry(frame,
+ EMULATOR_SCENE_HOUROFDAY,
+ NULL,
+ (void**)&requestedHour, NULL);
+ if (res == NAME_NOT_FOUND) {
+ ALOGV("Adding vendor tag");
+ res = add_camera_metadata_entry(frame,
+ EMULATOR_SCENE_HOUROFDAY,
+ &hourOfDay, 1);
+ if (res != NO_ERROR) {
+ ALOGE("Unable to add vendor tag");
+ }
+ } else if (res == OK) {
+ ALOGV("Replacing value in vendor tag");
+ *requestedHour = hourOfDay;
+ } else {
+ ALOGE("Error looking up vendor tag");
+ }
+
+ // TODO: Collect all final values used from sensor in addition to timestamp
+
+ mParent->mFrameQueueDst->enqueue_frame(mParent->mFrameQueueDst,
+ frame);
+ }
+
+ res = mParent->mRequestQueueSrc->free_request(mParent->mRequestQueueSrc, mRequest);
+ if (res != NO_ERROR) {
+ ALOGE("%s: Unable to return request buffer to queue: %d",
+ __FUNCTION__, res);
+ mParent->signalError();
+ return false;
+ }
+ mRequest = NULL;
+
+ ALOGV("Sending image buffer to output stream.");
+ GraphicBufferMapper::get().unlock(*mBuffer);
+ mParent->mRawStreamOps->enqueue_buffer(mParent->mRawStreamOps,
+ captureTime, mBuffer);
+ mBuffer = NULL;
+
+ return true;
+}
+
}; /* namespace android */
diff --git a/tools/emulator/system/camera/EmulatedFakeCamera2.h b/tools/emulator/system/camera/EmulatedFakeCamera2.h
index 89b12d3..c7af31e 100644
--- a/tools/emulator/system/camera/EmulatedFakeCamera2.h
+++ b/tools/emulator/system/camera/EmulatedFakeCamera2.h
@@ -24,6 +24,9 @@
*/
#include "EmulatedCamera2.h"
+#include "fake-pipeline2/Sensor.h"
+#include <utils/Condition.h>
+#include <utils/Thread.h>
namespace android {
@@ -44,22 +47,187 @@
public:
/* Initializes EmulatedFakeCamera2 instance. */
- status_t Initialize();
+ status_t Initialize();
/****************************************************************************
- * EmulatedCamera abstract API implementation.
+ * Camera Module API and generic hardware device API implementation
+ ***************************************************************************/
+public:
+
+ virtual status_t connectCamera(hw_device_t** device);
+
+ virtual status_t closeCamera();
+
+ virtual status_t getCameraInfo(struct camera_info *info);
+
+ /****************************************************************************
+ * EmulatedCamera2 abstract API implementation.
+ ***************************************************************************/
+protected:
+ /** Request input queue */
+
+ virtual int requestQueueNotify();
+
+ /** Count of requests in flight */
+ //virtual int getInProgressCount();
+
+ /** Cancel all captures in flight */
+ //virtual int flushCapturesInProgress();
+
+ /** Construct default request */
+ // virtual int constructDefaultRequest(
+ // int request_template,
+ // camera_metadata_t **request);
+
+ virtual int allocateStream(
+ uint32_t width,
+ uint32_t height,
+ int format,
+ camera2_stream_ops_t *stream_ops,
+ uint32_t *stream_id,
+ uint32_t *format_actual,
+ uint32_t *usage,
+ uint32_t *max_buffers);
+
+ virtual int registerStreamBuffers(
+ uint32_t stream_id,
+ int num_buffers,
+ buffer_handle_t *buffers);
+
+ virtual int releaseStream(uint32_t stream_id);
+
+ // virtual int allocateReprocessStream(
+ // uint32_t width,
+ // uint32_t height,
+ // uint32_t format,
+ // camera2_stream_ops_t *stream_ops,
+ // uint32_t *stream_id,
+ // uint32_t *format_actual,
+ // uint32_t *usage,
+ // uint32_t *max_buffers);
+
+ // virtual int releaseReprocessStream(uint32_t stream_id);
+
+ // virtual int triggerAction(uint32_t trigger_id,
+ // int ext1,
+ // int ext2);
+
+ /** Custom tag definitions */
+ virtual const char* getVendorSectionName(uint32_t tag);
+ virtual const char* getVendorTagName(uint32_t tag);
+ virtual int getVendorTagType(uint32_t tag);
+
+ /** Debug methods */
+
+ virtual int dump(int fd);
+
+ /** Methods for worker threads to call */
+
+ // Notifies rest of camera subsystem of serious error
+ void signalError();
+
+private:
+ /****************************************************************************
+ * Pipeline controller threads
***************************************************************************/
-protected:
+ class ConfigureThread: public Thread {
+ public:
+ ConfigureThread(EmulatedFakeCamera2 *parent);
+ ~ConfigureThread();
+
+ status_t waitUntilRunning();
+ status_t newRequestAvailable();
+ status_t readyToRun();
+ private:
+ EmulatedFakeCamera2 *mParent;
+
+ bool mRunning;
+ bool threadLoop();
+
+ Mutex mInputMutex; // Protects mActive
+ Condition mInputSignal;
+ bool mActive; // Whether we're waiting for input requests or actively
+ // working on them
+
+ camera_metadata_t *mRequest;
+ uint32_t *mNextFrameNumber;
+ uint64_t *mNextExposureTime;
+ uint64_t *mNextFrameDuration;
+ uint32_t *mNextSensitivity;
+ buffer_handle_t *mNextBuffer;
+ int mNextBufferStride;
+ };
+
+ class ReadoutThread: public Thread {
+ public:
+ ReadoutThread(EmulatedFakeCamera2 *parent);
+ ~ReadoutThread();
+
+ status_t readyToRun();
+
+ // Input
+ status_t waitUntilRunning();
+ void setNextCapture(camera_metadata_t *request,
+ buffer_handle_t *buffer);
+
+ private:
+ EmulatedFakeCamera2 *mParent;
+
+ bool mRunning;
+ bool threadLoop();
+
+ // Inputs
+ Mutex mInputMutex; // Protects mActive, mInFlightQueue
+ Condition mInputSignal;
+ bool mActive;
+
+ static const int kInFlightQueueSize = 4;
+ struct InFlightQueue {
+ camera_metadata_t *request;
+ buffer_handle_t *buffer;
+ } *mInFlightQueue;
+
+ int mInFlightHead;
+ int mInFlightTail;
+
+ // Internals
+ camera_metadata_t *mRequest;
+ buffer_handle_t *mBuffer;
+
+ };
/****************************************************************************
- * Data memebers.
+ * Static configuration information
+ ***************************************************************************/
+private:
+ static const uint32_t kAvailableFormats[];
+ static const uint32_t kAvailableSizesPerFormat[];
+ static const uint32_t kAvailableSizes[];
+ static const uint64_t kAvailableMinFrameDurations[];
+
+ /****************************************************************************
+ * Data members.
***************************************************************************/
protected:
/* Facing back (true) or front (false) switch. */
- bool mFacingBack;
+ bool mFacingBack;
+private:
+ /** Mutex for calls through camera2 device interface */
+ Mutex mMutex;
+
+ /** Stream manipulation */
+ uint32_t mNextStreamId;
+ camera2_stream_ops_t *mRawStreamOps;
+
+ /** Simulated hardware interfaces */
+ sp<Sensor> mSensor;
+
+ /** Pipeline control threads */
+ sp<ConfigureThread> mConfigureThread;
+ sp<ReadoutThread> mReadoutThread;
};
}; /* namespace android */
diff --git a/tools/emulator/system/camera/fake-pipeline2/Scene.cpp b/tools/emulator/system/camera/fake-pipeline2/Scene.cpp
new file mode 100644
index 0000000..aae57f7
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/Scene.cpp
@@ -0,0 +1,444 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "EmulatedCamera_Scene"
+#include <utils/Log.h>
+
+#include "Scene.h"
+
+// TODO: This should probably be done host-side in OpenGL for speed and better
+// quality
+
+namespace android {
+
+// Define single-letter shortcuts for scene definition, for directly indexing
+// mCurrentColors
+#define G Scene::GRASS*4
+#define S Scene::GRASS_SHADOW*4
+#define H Scene::HILL*4
+#define W Scene::WALL*4
+#define R Scene::ROOF*4
+#define D Scene::DOOR*4
+#define C Scene::CHIMNEY*4
+#define I Scene::WINDOW*4
+#define U Scene::SUN*4
+#define K Scene::SKY*4
+#define M Scene::MOON*4
+
// Scene dimensions, in scene-definition cells (not sensor pixels); each cell
// is expanded to a mMapDiv x mMapDiv block of sensor pixels at readout time.
const int Scene::kSceneWidth = 20;
const int Scene::kSceneHeight = 20;

// The scene layout: a house (walls, roof, door, windows, chimney) on a hill,
// with grass in the foreground and sky above. Each entry is a material index
// pre-multiplied by 4 (via the single-letter macros above) so it can directly
// index the per-material RGGB quadruples in mCurrentColors.
const uint8_t Scene::kScene[Scene::kSceneWidth * Scene::kSceneHeight] = {
    //      5         10        15        20
    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K, // 5
    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
    K,K,K,K,K,K,K,K,H,H,H,H,H,H,H,H,H,H,H,H,
    K,K,K,K,K,K,K,K,H,H,H,H,H,H,H,C,C,H,H,H,
    K,K,K,K,K,K,H,H,H,H,H,H,H,H,H,C,C,H,H,H,
    H,K,K,K,K,K,H,R,R,R,R,R,R,R,R,R,R,R,R,H, // 10
    H,K,K,K,K,H,H,R,R,R,R,R,R,R,R,R,R,R,R,H,
    H,H,H,K,K,H,H,R,R,R,R,R,R,R,R,R,R,R,R,H,
    H,H,H,K,K,H,H,H,W,W,W,W,W,W,W,W,W,W,H,H,
    S,S,S,G,G,S,S,S,W,W,W,W,W,W,W,W,W,W,S,S,
    S,G,G,G,G,S,S,S,W,I,I,W,D,D,W,I,I,W,S,S, // 15
    G,G,G,G,G,G,S,S,W,I,I,W,D,D,W,I,I,W,S,S,
    G,G,G,G,G,G,G,G,W,W,W,W,D,D,W,W,W,W,G,G,
    G,G,G,G,G,G,G,G,W,W,W,W,D,D,W,W,W,W,G,G,
    G,G,G,G,G,G,G,G,S,S,S,S,S,S,S,S,S,S,G,G,
    G,G,G,G,G,G,G,G,S,S,S,S,S,S,S,S,S,S,G,G, // 20
    //      5         10        15        20
};
+
+#undef G
+#undef S
+#undef H
+#undef W
+#undef R
+#undef D
+#undef C
+#undef I
+#undef U
+#undef K
+#undef M
+
/*
 * Construct a scene sized to the given sensor resolution.
 *
 * sensorWidthPx/sensorHeightPx: sensor active-array size, in pixels.
 * sensorSensitivity: conversion factor used by calculateScene() to turn
 *     scene illuminance into electron counts (presumably electrons per
 *     lux-second — TODO confirm against Sensor::kElectronsPerLuxSecond).
 *
 * Defaults: noon (hour 12), 33 ms exposure, sRGB-primary color filters.
 */
Scene::Scene(
        int sensorWidthPx,
        int sensorHeightPx,
        float sensorSensitivity):
        mSensorWidth(sensorWidthPx),
        mSensorHeight(sensorHeightPx),
        mHour(12),
        mExposureDuration(0.033f),
        mSensorSensitivity(sensorSensitivity)
{
    // Map scene to sensor pixels: pick the cell size (mMapDiv) from the
    // larger sensor dimension so the scene grid fully covers the sensor,
    // then center the scene over the sensor with the offsets.
    if (mSensorWidth > mSensorHeight) {
        mMapDiv = (mSensorWidth / kSceneWidth) + 1;
    } else {
        mMapDiv = (mSensorHeight / kSceneHeight) + 1;
    }
    mOffsetX = (kSceneWidth * mMapDiv - mSensorWidth) / 2;
    mOffsetY = (kSceneHeight * mMapDiv - mSensorHeight) / 2;

    // Assume that sensor filters are sRGB primaries to start (XYZ -> linear
    // sRGB conversion matrix rows); overridable via setColorFilterXYZ().
    mFilterR[0]  =  3.2406f; mFilterR[1]  = -1.5372f; mFilterR[2]  = -0.4986f;
    mFilterGr[0] = -0.9689f; mFilterGr[1] =  1.8758f; mFilterGr[2] =  0.0415f;
    mFilterGb[0] = -0.9689f; mFilterGb[1] =  1.8758f; mFilterGb[2] =  0.0415f;
    mFilterB[0]  =  0.0557f; mFilterB[1]  = -0.2040f; mFilterB[2]  =  1.0570f;
}
+
// Trivial destructor; the scene owns no dynamically-allocated resources.
Scene::~Scene() {
}
+
+void Scene::setColorFilterXYZ(
+ float rX, float rY, float rZ,
+ float grX, float grY, float grZ,
+ float gbX, float gbY, float gbZ,
+ float bX, float bY, float bZ) {
+ mFilterR[0] = rX; mFilterR[1] = rY; mFilterR[2] = rZ;
+ mFilterGr[0] = grX; mFilterGr[1] = grY; mFilterGr[2] = grZ;
+ mFilterGb[0] = gbX; mFilterGb[1] = gbY; mFilterGb[2] = gbZ;
+ mFilterB[0] = bX; mFilterB[1] = bY; mFilterB[2] = bZ;
+}
+
+void Scene::setHour(int hour) {
+ ALOGV("Hour set to: %d", hour);
+ mHour = hour % 24;
+}
+
// Return the current simulated hour of day (0-23), as set by setHour().
int Scene::getHour() {
    return mHour;
}
+
// Set the exposure duration in seconds, used by calculateScene() to scale
// illuminance into accumulated electrons. Must be called before
// calculateScene() to take effect.
void Scene::setExposureDuration(float seconds) {
    mExposureDuration = seconds;
}
+
/*
 * Recompute the per-material RGGB sensor responses (mCurrentColors) for the
 * current hour plus |time|, a nanosecond offset used to interpolate between
 * the 2-hour entries of the sun/moon illumination tables.
 * NOTE(review): |time| appears to be treated as "time since the start of the
 * current kTimeStep slot" — confirm what callers actually pass here.
 *
 * Must be preceded by setHour()/setExposureDuration() (and optionally
 * setColorFilterXYZ()). Resets the readout pixel to (0,0).
 */
void Scene::calculateScene(nsecs_t time) {
    // Calculate time fractions for interpolation
    int timeIdx = mHour / kTimeStep;
    int nextTimeIdx = (timeIdx + 1) % (24 / kTimeStep);
    const nsecs_t kOneHourInNsec = 1e9 * 60 * 60;
    nsecs_t timeSinceIdx = (mHour - timeIdx * kTimeStep) * kOneHourInNsec + time;
    float timeFrac = timeSinceIdx / (float)(kOneHourInNsec * kTimeStep);

    // Determine overall sunlight levels, linearly interpolated between the
    // two surrounding table entries
    float sunLux =
            kSunlight[timeIdx] * (1 - timeFrac) +
            kSunlight[nextTimeIdx] * timeFrac;
    ALOGV("Sun lux: %f", sunLux);

    float sunShadeLux = sunLux * (kDaylightShadeIllum / kDirectSunIllum);

    // Determine sun/shade illumination chromaticity, picking sunset/twilight
    // chromaticity when the corresponding illuminance table entry says so
    float currentSunXY[2];
    float currentShadeXY[2];

    const float *prevSunXY, *nextSunXY;
    const float *prevShadeXY, *nextShadeXY;
    if (kSunlight[timeIdx] == kSunsetIllum ||
            kSunlight[timeIdx] == kTwilightIllum) {
        prevSunXY = kSunsetXY;
        prevShadeXY = kSunsetXY;
    } else {
        prevSunXY = kDirectSunlightXY;
        prevShadeXY = kDaylightXY;
    }
    if (kSunlight[nextTimeIdx] == kSunsetIllum ||
            kSunlight[nextTimeIdx] == kTwilightIllum) {
        nextSunXY = kSunsetXY;
        nextShadeXY = kSunsetXY;
    } else {
        nextSunXY = kDirectSunlightXY;
        nextShadeXY = kDaylightXY;
    }
    currentSunXY[0] = prevSunXY[0] * (1 - timeFrac) +
            nextSunXY[0] * timeFrac;
    currentSunXY[1] = prevSunXY[1] * (1 - timeFrac) +
            nextSunXY[1] * timeFrac;

    currentShadeXY[0] = prevShadeXY[0] * (1 - timeFrac) +
            nextShadeXY[0] * timeFrac;
    currentShadeXY[1] = prevShadeXY[1] * (1 - timeFrac) +
            nextShadeXY[1] * timeFrac;

    ALOGV("Sun XY: %f, %f, Shade XY: %f, %f",
            currentSunXY[0], currentSunXY[1],
            currentShadeXY[0], currentShadeXY[1]);

    // Converting for xyY to XYZ:
    //  X = Y / y * x
    //  Y = Y
    //  Z = Y / y * (1 - x - y);
    float sunXYZ[3] = {
        sunLux / currentSunXY[1] * currentSunXY[0],
        sunLux,
        sunLux / currentSunXY[1] *
        (1 - currentSunXY[0] - currentSunXY[1])
    };
    float sunShadeXYZ[3] = {
        sunShadeLux / currentShadeXY[1] * currentShadeXY[0],
        sunShadeLux,
        sunShadeLux / currentShadeXY[1] *
        (1 - currentShadeXY[0] - currentShadeXY[1])
    };
    ALOGV("Sun XYZ: %f, %f, %f",
            sunXYZ[0], sunXYZ[1], sunXYZ[2]);
    ALOGV("Sun shade XYZ: %f, %f, %f",
            sunShadeXYZ[0], sunShadeXYZ[1], sunShadeXYZ[2]);

    // Determine moonlight levels, same interpolation as for sunlight
    float moonLux =
            kMoonlight[timeIdx] * (1 - timeFrac) +
            kMoonlight[nextTimeIdx] * timeFrac;
    float moonShadeLux = moonLux * (kDaylightShadeIllum / kDirectSunIllum);

    float moonXYZ[3] = {
        moonLux / kMoonlightXY[1] * kMoonlightXY[0],
        moonLux,
        moonLux / kMoonlightXY[1] *
        (1 - kMoonlightXY[0] - kMoonlightXY[1])
    };
    float moonShadeXYZ[3] = {
        moonShadeLux / kMoonlightXY[1] * kMoonlightXY[0],
        moonShadeLux,
        moonShadeLux / kMoonlightXY[1] *
        (1 - kMoonlightXY[0] - kMoonlightXY[1])
    };

    // Determine starlight level (constant floor, using moonlight chromaticity)
    const float kClearNightXYZ[3] = {
        kClearNightIllum / kMoonlightXY[1] * kMoonlightXY[0],
        kClearNightIllum,
        kClearNightIllum / kMoonlightXY[1] *
        (1 - kMoonlightXY[0] - kMoonlightXY[1])
    };

    // Calculate direct and shaded light
    float directIllumXYZ[3] = {
        sunXYZ[0] + moonXYZ[0] + kClearNightXYZ[0],
        sunXYZ[1] + moonXYZ[1] + kClearNightXYZ[1],
        sunXYZ[2] + moonXYZ[2] + kClearNightXYZ[2],
    };

    float shadeIllumXYZ[3] = {
        kClearNightXYZ[0],
        kClearNightXYZ[1],
        kClearNightXYZ[2]
    };

    // Before the sun is overhead, "shadowed" materials still see direct sun;
    // afterwards they only get the shaded sun contribution
    shadeIllumXYZ[0] += (mHour < kSunOverhead) ? sunXYZ[0] : sunShadeXYZ[0];
    shadeIllumXYZ[1] += (mHour < kSunOverhead) ? sunXYZ[1] : sunShadeXYZ[1];
    shadeIllumXYZ[2] += (mHour < kSunOverhead) ? sunXYZ[2] : sunShadeXYZ[2];

    // Moon up period covers 23->0 transition, shift for simplicity
    int adjHour = (mHour + 12) % 24;
    int adjMoonOverhead = (kMoonOverhead + 12 ) % 24;
    shadeIllumXYZ[0] += (adjHour < adjMoonOverhead) ?
            moonXYZ[0] : moonShadeXYZ[0];
    shadeIllumXYZ[1] += (adjHour < adjMoonOverhead) ?
            moonXYZ[1] : moonShadeXYZ[1];
    shadeIllumXYZ[2] += (adjHour < adjMoonOverhead) ?
            moonXYZ[2] : moonShadeXYZ[2];

    ALOGV("Direct XYZ: %f, %f, %f",
            directIllumXYZ[0],directIllumXYZ[1],directIllumXYZ[2]);
    ALOGV("Shade XYZ: %f, %f, %f",
            shadeIllumXYZ[0], shadeIllumXYZ[1], shadeIllumXYZ[2]);

    // For each material, convert its reflectance (or self-lit emission) into
    // per-Bayer-channel electron counts for the current exposure settings
    for (int i = 0; i < NUM_MATERIALS; i++) {
        // Converting for xyY to XYZ:
        //  X = Y / y * x
        //  Y = Y
        //  Z = Y / y * (1 - x - y);
        float matXYZ[3] = {
            kMaterials_xyY[i][2] / kMaterials_xyY[i][1] *
            kMaterials_xyY[i][0],
            kMaterials_xyY[i][2],
            kMaterials_xyY[i][2] / kMaterials_xyY[i][1] *
            (1 - kMaterials_xyY[i][0] - kMaterials_xyY[i][1])
        };

        if (kMaterialsFlags[i] == 0 || kMaterialsFlags[i] & kSky) {
            matXYZ[0] *= directIllumXYZ[0];
            matXYZ[1] *= directIllumXYZ[1];
            matXYZ[2] *= directIllumXYZ[2];
        } else if (kMaterialsFlags[i] & kShadowed) {
            matXYZ[0] *= shadeIllumXYZ[0];
            matXYZ[1] *= shadeIllumXYZ[1];
            matXYZ[2] *= shadeIllumXYZ[2];
        } // else if (kMaterialsFlags[i] & kSelfLit), do nothing

        ALOGV("Mat %d XYZ: %f, %f, %f", i, matXYZ[0], matXYZ[1], matXYZ[2]);
        // Lux -> electrons, scaled by exposure time and lens aperture
        float luxToElectrons = mSensorSensitivity * mExposureDuration /
                (kAperture * kAperture);
        mCurrentColors[i*4 + 0] =
                (mFilterR[0] * matXYZ[0] +
                 mFilterR[1] * matXYZ[1] +
                 mFilterR[2] * matXYZ[2])
                * luxToElectrons;
        mCurrentColors[i*4 + 1] =
                (mFilterGr[0] * matXYZ[0] +
                 mFilterGr[1] * matXYZ[1] +
                 mFilterGr[2] * matXYZ[2])
                * luxToElectrons;
        mCurrentColors[i*4 + 2] =
                (mFilterGb[0] * matXYZ[0] +
                 mFilterGb[1] * matXYZ[1] +
                 mFilterGb[2] * matXYZ[2])
                * luxToElectrons;
        mCurrentColors[i*4 + 3] =
                (mFilterB[0] * matXYZ[0] +
                 mFilterB[1] * matXYZ[1] +
                 mFilterB[2] * matXYZ[2])
                * luxToElectrons;
        ALOGV("Color %d RGGB: %d, %d, %d, %d", i,
                mCurrentColors[i*4 + 0],
                mCurrentColors[i*4 + 1],
                mCurrentColors[i*4 + 2],
                mCurrentColors[i*4 + 3]);
    }
    // Restart readout at the top-left of the sensor
    setReadoutPixel(0,0);
}
+
+void Scene::setReadoutPixel(int x, int y) {
+ mCurrentX = x;
+ mCurrentY = y;
+ mSubX = (x + mOffsetY) % mMapDiv;
+ mSubY = (y + mOffsetX) % mMapDiv;
+ mSceneX = (x + mOffsetX) / mMapDiv;
+ mSceneY = (y + mOffsetY) / mMapDiv;
+ mSceneIdx = mSceneY * kSceneWidth + mSceneX;
+ mCurrentSceneMaterial = &(mCurrentColors[kScene[mSceneIdx]]);
+}
+
// Return the electron count for color channel |c| (0=R, 1=Gr, 2=Gb, 3=B) at
// the current readout pixel, then auto-advance the readout position by one
// pixel (wrapping at the end of each row and at the end of the frame).
// NOTE(review): the |x| and |y| arguments are ignored; the position is
// tracked internally via setReadoutPixel() — confirm callers always pass
// coordinates matching the internal cursor.
uint32_t Scene::getPixelElectrons(int x, int y, int c) {
    uint32_t e = mCurrentSceneMaterial[c];
    mCurrentX++;
    mSubX++;
    if (mCurrentX >= mSensorWidth) {
        mCurrentX = 0;
        mCurrentY++;
        if (mCurrentY >= mSensorHeight) mCurrentY = 0;
        setReadoutPixel(mCurrentX, mCurrentY);
    } else if (mSubX > mMapDiv) {
        // NOTE(review): setReadoutPixel() keeps mSubX in [0, mMapDiv), so
        // '>' here advances to the next scene cell one pixel late — looks
        // like an off-by-one ('>=' expected); confirm intended cell width
        // before changing.
        mSceneIdx++;
        mSceneX++;
        mCurrentSceneMaterial = &(mCurrentColors[kScene[mSceneIdx]]);
        mSubX = 0;
    }
    return e;
}
+
// Aperture of imaging lens (f-number); divides the lux-to-electron
// conversion in calculateScene() by kAperture^2
const float Scene::kAperture = 2.8;

// Sun illumination levels through the day, one entry per kTimeStep hours
const float Scene::kSunlight[24/kTimeStep] =
{
    0, // 00:00
    0,
    0,
    kTwilightIllum, // 06:00
    kDirectSunIllum,
    kDirectSunIllum,
    kDirectSunIllum, // 12:00
    kDirectSunIllum,
    kDirectSunIllum,
    kSunsetIllum, // 18:00
    kTwilightIllum,
    0
};

// Moon illumination levels through the day, one entry per kTimeStep hours
const float Scene::kMoonlight[24/kTimeStep] =
{
    kFullMoonIllum, // 00:00
    kFullMoonIllum,
    0,
    0, // 06:00
    0,
    0,
    0, // 12:00
    0,
    0,
    0, // 18:00
    0,
    kFullMoonIllum
};

// Hours at which each body is directly overhead (drives the direct/shaded
// lighting split in calculateScene())
const int Scene::kSunOverhead = 12;
const int Scene::kMoonOverhead = 0;

// Used for sun illumination levels (lux)
const float Scene::kDirectSunIllum = 100000;
const float Scene::kSunsetIllum = 400;
const float Scene::kTwilightIllum = 4;
// Used for moon illumination levels (lux)
const float Scene::kFullMoonIllum = 1;
// Other illumination levels (lux)
const float Scene::kDaylightShadeIllum = 20000;
const float Scene::kClearNightIllum = 2e-3;
const float Scene::kStarIllum = 2e-6;
const float Scene::kLivingRoomIllum = 50;

// Chromaticities (CIE 1931 xy) of the various illumination sources
const float Scene::kIncandescentXY[2] = { 0.44757f, 0.40745f};
const float Scene::kDirectSunlightXY[2] = { 0.34842f, 0.35161f};
const float Scene::kDaylightXY[2] = { 0.31271f, 0.32902f};
const float Scene::kNoonSkyXY[2] = { 0.346f, 0.359f};
const float Scene::kMoonlightXY[2] = { 0.34842f, 0.35161f};
const float Scene::kSunsetXY[2] = { 0.527f, 0.413f};

// Material flag bits, tested in calculateScene()
const uint8_t Scene::kSelfLit = 0x01;
const uint8_t Scene::kShadowed = 0x02;
const uint8_t Scene::kSky = 0x04;

// For non-self-lit materials, the Y component is normalized with 1=full
// reflectance; for self-lit materials, it's the constant illuminance in lux.
const float Scene::kMaterials_xyY[Scene::NUM_MATERIALS][3] = {
    { 0.3688f, 0.4501f, .1329f }, // GRASS
    { 0.3688f, 0.4501f, .1329f }, // GRASS_SHADOW
    { 0.3986f, 0.5002f, .4440f }, // HILL
    { 0.3262f, 0.5040f, .2297f }, // WALL
    { 0.4336f, 0.3787f, .1029f }, // ROOF
    { 0.3316f, 0.2544f, .0639f }, // DOOR
    { 0.3425f, 0.3577f, .0887f }, // CHIMNEY
    { kIncandescentXY[0], kIncandescentXY[1], kLivingRoomIllum }, // WINDOW
    { kDirectSunlightXY[0], kDirectSunlightXY[1], kDirectSunIllum }, // SUN
    { kNoonSkyXY[0], kNoonSkyXY[1], kDaylightShadeIllum / kDirectSunIllum }, // SKY
    { kMoonlightXY[0], kMoonlightXY[1], kFullMoonIllum } // MOON
};

// One flag entry per material, same order as the Materials enum
const uint8_t Scene::kMaterialsFlags[Scene::NUM_MATERIALS] = {
    0,
    kShadowed,
    kShadowed,
    kShadowed,
    kShadowed,
    kShadowed,
    kShadowed,
    kSelfLit,
    kSelfLit,
    kSky,
    kSelfLit,
};
+
+} // namespace android
diff --git a/tools/emulator/system/camera/fake-pipeline2/Scene.h b/tools/emulator/system/camera/fake-pipeline2/Scene.h
new file mode 100644
index 0000000..89edaed
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/Scene.h
@@ -0,0 +1,164 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * The Scene class implements a simple physical simulation of a scene, using the
+ * CIE 1931 colorspace to represent light in physical units (lux).
+ *
+ * It's fairly approximate, but does provide a scene with realistic widely
+ * variable illumination levels and colors over time.
+ *
+ */
+
+#ifndef HW_EMULATOR_CAMERA2_SCENE_H
+#define HW_EMULATOR_CAMERA2_SCENE_H
+
+#include "utils/Timers.h"
+
+namespace android {
+
class Scene {
  public:
    // Construct a scene sized to the given sensor resolution;
    // sensorSensitivity converts lux-seconds to electrons.
    Scene(int sensorWidthPx,
            int sensorHeightPx,
            float sensorSensitivity);
    ~Scene();

    // Set the filter coefficients for the red, green, and blue filters on the
    // sensor. Used as an optimization to pre-calculate various illuminance
    // values. Two different green filters can be provided, to account for
    // possible cross-talk on a Bayer sensor. Must be called before
    // calculateScene.
    void setColorFilterXYZ(
            float rX, float rY, float rZ,
            float grX, float grY, float grZ,
            float gbX, float gbY, float gbZ,
            float bX, float bY, float bZ);

    // Set time of day (24-hour clock). This controls the general light levels
    // in the scene. Must be called before calculateScene
    void setHour(int hour);
    // Get current hour
    int getHour();

    // Set the duration of exposure for determining luminous exposure.
    // Must be called before calculateScene
    void setExposureDuration(float seconds);

    // Calculate scene information for current hour and the time offset since
    // the hour. Must be called at least once before calling getLuminousExposure.
    // Resets pixel readout location to 0,0
    void calculateScene(nsecs_t time);

    // Set sensor pixel readout location.
    void setReadoutPixel(int x, int y);

    // Get sensor response in physical units (electrons) for light hitting the
    // current readout pixel, after passing through color filters. The color
    // channels are 0=R, 1=Gr, 2=Gb, 3=B. The readout pixel will be
    // auto-incremented.
    uint32_t getPixelElectrons(int x, int y, int c);

  private:
    // Sensor color filtering coefficients in XYZ
    float mFilterR[3];
    float mFilterGr[3];
    float mFilterGb[3];
    float mFilterB[3];

    // Centering offsets and scene-cell size (sensor pixels per cell)
    int mOffsetX, mOffsetY;
    int mMapDiv;

    // Sensor dimensions and current readout cursor state
    int mSensorWidth;
    int mSensorHeight;
    int mCurrentX;
    int mCurrentY;
    int mSubX;
    int mSubY;
    int mSceneX;
    int mSceneY;
    int mSceneIdx;
    // Non-owning pointer into mCurrentColors for the cell under the cursor
    uint32_t *mCurrentSceneMaterial;

    // Simulation parameters set by setHour()/setExposureDuration()/ctor
    int mHour;
    float mExposureDuration;
    float mSensorSensitivity;

    enum Materials {
        GRASS = 0,
        GRASS_SHADOW,
        HILL,
        WALL,
        ROOF,
        DOOR,
        CHIMNEY,
        WINDOW,
        SUN,
        SKY,
        MOON,
        NUM_MATERIALS
    };

    // Per-material RGGB electron counts, refreshed by calculateScene()
    uint32_t mCurrentColors[NUM_MATERIALS*4];

    /**
     * Constants for scene definition. These are various degrees of approximate.
     */

    // Aperture of imaging lens
    static const float kAperture;

    // Sun, moon illuminance levels in 2-hour increments. These don't match any
    // real day anywhere.
    static const uint32_t kTimeStep = 2;
    static const float kSunlight[];
    static const float kMoonlight[];
    static const int kSunOverhead;
    static const int kMoonOverhead;

    // Illumination levels for various conditions, in lux
    static const float kDirectSunIllum;
    static const float kDaylightShadeIllum;
    static const float kSunsetIllum;
    static const float kTwilightIllum;
    static const float kFullMoonIllum;
    static const float kClearNightIllum;
    static const float kStarIllum;
    static const float kLivingRoomIllum;

    // Chromaticity of various illumination sources
    static const float kIncandescentXY[2];
    static const float kDirectSunlightXY[2];
    static const float kDaylightXY[2];
    static const float kNoonSkyXY[2];
    static const float kMoonlightXY[2];
    static const float kSunsetXY[2];

    // Material flag bits (see kMaterialsFlags)
    static const uint8_t kSelfLit;
    static const uint8_t kShadowed;
    static const uint8_t kSky;

    static const float kMaterials_xyY[NUM_MATERIALS][3];
    static const uint8_t kMaterialsFlags[NUM_MATERIALS];

    // Scene layout (see Scene.cpp for the map itself)
    static const int kSceneWidth;
    static const int kSceneHeight;
    static const uint8_t kScene[];
};
+
+}
+
+#endif // HW_EMULATOR_CAMERA2_SCENE_H
diff --git a/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp b/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp
new file mode 100644
index 0000000..d24d166
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp
@@ -0,0 +1,353 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "EmulatedCamera2_Sensor"
+#include <utils/Log.h>
+
+#include "Sensor.h"
+#include <cmath>
+#include <cstdlib>
+#include "system/camera_metadata.h"
+
+namespace android {
+
// Sensor array size, in pixels (width, height)
const unsigned int Sensor::kResolution[2] = {640, 480};

// Supported exposure-time and frame-duration ranges
const nsecs_t Sensor::kExposureTimeRange[2] =
    {1000L, 30000000000L} ; // 1 us - 30 sec
const nsecs_t Sensor::kFrameDurationRange[2] =
    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
const nsecs_t Sensor::kMinVerticalBlank = 10000L;

const uint8_t Sensor::kColorFilterArrangement = ANDROID_SENSOR_RGGB;

// Output image data characteristics (12-bit range, in 16-bit containers)
const uint32_t Sensor::kMaxRawValue = 4000;
const uint32_t Sensor::kBlackLevel  = 1000;

// Sensor sensitivity
const float Sensor::kSaturationVoltage      = 0.520f;
const uint32_t Sensor::kSaturationElectrons = 2000;
const float Sensor::kVoltsPerLuxSecond      = 0.100f;

// Derived: scene illuminance (lux-seconds) -> accumulated electrons
const float Sensor::kElectronsPerLuxSecond =
        Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
        * Sensor::kVoltsPerLuxSecond;

// Derived: digital counts per electron at unity (ISO 100) gain
const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
            Sensor::kSaturationElectrons;

// Read-noise model, split into pre-gain (electrons) and post-gain (counts)
const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
const float Sensor::kReadNoiseStddevAfterGain =  2.100; // in digital counts
const float Sensor::kReadNoiseVarBeforeGain =
            Sensor::kReadNoiseStddevBeforeGain *
            Sensor::kReadNoiseStddevBeforeGain;
const float Sensor::kReadNoiseVarAfterGain =
            Sensor::kReadNoiseStddevAfterGain *
            Sensor::kReadNoiseStddevAfterGain;

// While each row has to read out, reset, and then expose, the (reset +
// expose) sequence can be overlapped by other row readouts, so the final
// minimum frame duration is purely a function of row readout time, at least
// if there's a reasonable number of rows.
const nsecs_t Sensor::kRowReadoutTime =
            Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];

// Supported analog sensitivities (ISO) and the default
const uint32_t Sensor::kAvailableSensitivities[5] =
    {100, 200, 400, 800, 1600};
const uint32_t Sensor::kDefaultSensitivity = 100;
+
+/** A few utility functions for math, normal distributions */
+
+// Take advantage of IEEE floating-point format to calculate an approximate
+// square root. Accurate to within +-3.6%
float sqrtf_approx(float r) {
    // Modifier is based on IEEE floating-point representation; the
    // manipulations boil down to finding approximate log2, dividing by two,
    // and then inverting the log2. A bias is added to make the relative error
    // symmetric about the real answer. Only meaningful for non-negative,
    // finite inputs.
    const int32_t modifier = 0x1FBB4000;

    // Use memcpy for the float<->int32 reinterpretation: dereferencing a
    // cast pointer (as the original did) violates strict aliasing and is
    // undefined behavior; memcpy compiles to the same single move.
    int32_t r_i;
    memcpy(&r_i, &r, sizeof(r_i));
    r_i = (r_i >> 1) + modifier;

    float result;
    memcpy(&result, &r_i, sizeof(result));
    return result;
}
+
+
+
// Construct the simulated sensor with default controls: minimum frame
// duration, exposure filling that duration minus the vertical blank, default
// (ISO 100) sensitivity, and no destination buffer yet. The capture thread
// is not started here; see startUp().
Sensor::Sensor():
        Thread(false),
        mGotVSync(false),
        mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
        mFrameDuration(kFrameDurationRange[0]),
        mGainFactor(kDefaultSensitivity),
        mNextBuffer(NULL),
        mCapturedBuffer(NULL),
        mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
{

}
+
// Stop the capture thread (blocking until it exits) before destruction.
Sensor::~Sensor() {
    shutDown();
}
+
+status_t Sensor::startUp() {
+ int res;
+ mCapturedBuffer = NULL;
+
+ res = readyToRun();
+ if (res != OK) {
+ ALOGE("Unable to prepare sensor capture thread to run: %d", res);
+ return res;
+ }
+ res = run("EmulatedFakeCamera2::Sensor",
+ ANDROID_PRIORITY_URGENT_DISPLAY);
+
+ if (res != OK) {
+ ALOGE("Unable to start up sensor capture thread: %d", res);
+ }
+ return res;
+}
+
+status_t Sensor::shutDown() {
+ int res;
+ res = requestExitAndWait();
+ if (res != OK) {
+ ALOGE("Unable to shut down sensor capture thread: %d", res);
+ }
+ return res;
+}
+
// Expose the scene simulator so callers can adjust scene parameters
// (hour, color filters, etc.). Returned reference is owned by the Sensor.
Scene &Sensor::getScene() {
    return mScene;
}
+
// Set the exposure time, in nanoseconds, for the next captured frame.
// Thread-safe; picked up by threadLoop() at the next frame boundary.
void Sensor::setExposureTime(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGV("Exposure set to %f", ns/1000000.f);
    mExposureTime = ns;
}
+
// Set the total frame duration, in nanoseconds, for the next captured frame.
// Thread-safe; picked up by threadLoop() at the next frame boundary.
void Sensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGV("Frame duration set to %f", ns/1000000.f);
    mFrameDuration = ns;
}
+
// Set the sensor sensitivity (ISO units; 100 = unity gain) for the next
// captured frame. Thread-safe; applied by threadLoop().
void Sensor::setSensitivity(uint32_t gain) {
    Mutex::Autolock lock(mControlMutex);
    ALOGV("Gain set to %d", gain);
    mGainFactor = gain;
}
+
// Set the buffer (and its stride) that the next frame's raw data should be
// written into. The buffer is not owned by the Sensor; it is consumed once
// (threadLoop() nulls mNextBuffer after claiming it).
// NOTE(review): stride is used as a 16-bit-pixel stride in threadLoop(), not
// bytes — confirm callers pass it in pixels.
void Sensor::setDestinationBuffer(uint8_t *buffer, uint32_t stride) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffer = buffer;
    mNextStride = stride;
}
+
// Block until the sensor signals the start of the next frame (VSync), or
// until |reltime| nanoseconds have passed. Returns true if VSync was
// signaled, false on timeout or error.
// NOTE(review): a single waitRelative with no retry loop — a spurious wakeup
// before the signal returns false (mGotVSync still unset) even though time
// remains; callers appear to treat that like a timeout. Confirm acceptable.
bool Sensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    return mGotVSync;
}
+
+bool Sensor::waitForNewFrame(nsecs_t reltime,
+ nsecs_t *captureTime) {
+ Mutex::Autolock lock(mReadoutMutex);
+ uint8_t *ret;
+ if (mCapturedBuffer == NULL) {
+ int res;
+ res = mReadoutComplete.waitRelative(mReadoutMutex, reltime);
+ if (res == TIMED_OUT) {
+ return false;
+ } else if (res != OK || mCapturedBuffer == NULL) {
+ ALOGE("Error waiting for sensor readout signal: %d", res);
+ return false;
+ }
+ }
+ *captureTime = mCaptureTime;
+ mCapturedBuffer = NULL;
+ return true;
+}
+
// Thread-start hook: record the startup time (used as the zero point for
// simulated capture timestamps) and reset the capture pipeline state.
status_t Sensor::readyToRun() {
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffer = NULL;
    return OK;
}
+
bool Sensor::threadLoop() {
    /**
     * Sensor capture operation main loop. One iteration simulates one frame
     * period: latch controls, signal VSync, pretend to read out the previous
     * frame, synthesize the new frame's pixel data, then sleep out the
     * remainder of the frame duration.
     *
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */

    /**
     * Stage 1: Read in latest control parameters
     */
    uint64_t exposureDuration;
    uint64_t frameDuration;
    uint32_t gain;
    uint8_t *nextBuffer;
    uint32_t stride;
    {
        Mutex::Autolock lock(mControlMutex);
        exposureDuration = mExposureTime;
        frameDuration    = mFrameDuration;
        gain             = mGainFactor;
        nextBuffer       = mNextBuffer;
        stride           = mNextStride;
        // Don't reuse a buffer
        mNextBuffer = NULL;

        // Signal VSync for start of readout
        ALOGV("Sensor VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /**
     * Stage 3: Read out latest captured image
     */

    uint8_t *capturedBuffer = NULL;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime  = systemTime();
    nsecs_t simulatedTime    = startRealTime - mStartupTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;
    nsecs_t frameReadoutEndRealTime = startRealTime +
            kRowReadoutTime * kResolution[1];

    if (mNextCapturedBuffer != NULL) {
        ALOGV("Sensor starting readout");
        // Pretend we're doing readout now; will signal once enough time has elapsed
        capturedBuffer = mNextCapturedBuffer;
        captureTime    = mNextCaptureTime;
    }
    simulatedTime += kRowReadoutTime + kMinVerticalBlank;

    /**
     * Stage 2: Capture new image
     */

    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffer = nextBuffer;

    if (mNextCapturedBuffer != NULL) {
        ALOGV("Sensor capturing image (%d x %d) stride %d",
                kResolution[0], kResolution[1], stride);
        ALOGV("Exposure: %f ms, gain: %d", (float)exposureDuration/1e6, gain);
        mScene.setExposureDuration((float)exposureDuration/1e9);
        mScene.calculateScene(mNextCaptureTime);

        // Per-frame gain terms: gain is in ISO units (100 = unity)
        float totalGain = gain/100.0 * kBaseGainFactor;
        float noiseVarGain =  totalGain * totalGain;
        float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
                + kReadNoiseVarAfterGain;

        int bayerSelect[4] = {0, 1, 2, 3}; // RGGB
        // Synthesize the Bayer mosaic row by row
        for (unsigned int y = 0; y < kResolution[1]; y++ ) {
            int *bayerRow = bayerSelect + (y & 0x1) * 2;
            // NOTE(review): stride is used as a 16-bit-pixel stride here,
            // not a byte stride — confirm against the buffer producer.
            uint16_t *px = (uint16_t*)mNextCapturedBuffer + y * stride;
            for (unsigned int x = 0; x < kResolution[0]; x++) {
                uint32_t electronCount;
                electronCount = mScene.getPixelElectrons(x, y, bayerRow[x & 0x1]);

                // TODO: Better pixel saturation curve?
                electronCount = (electronCount < kSaturationElectrons) ?
                        electronCount : kSaturationElectrons;

                // TODO: Better A/D saturation curve?
                uint16_t rawCount = electronCount * totalGain;
                rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;

                // Calculate noise value
                // TODO: Use more-correct Gaussian instead of uniform noise
                float photonNoiseVar = electronCount * noiseVarGain;
                float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
                // Scaled to roughly match gaussian/uniform noise stddev
                float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;

                rawCount += kBlackLevel;
                rawCount += noiseStddev * noiseSample;

                *px++ = rawCount;
            }
            // Advance the simulated clock by one row's readout time
            simulatedTime += kRowReadoutTime;

            // If enough time has elapsed to complete readout, signal done frame
            // Only check every so often, though
            if ((capturedBuffer != NULL) &&
                    ((y & 63) == 0) &&
                    (systemTime() >= frameReadoutEndRealTime) ) {
                ALOGV("Sensor readout complete");
                Mutex::Autolock lock(mReadoutMutex);
                mCapturedBuffer = capturedBuffer;
                mCaptureTime = captureTime;
                mReadoutComplete.signal();
                capturedBuffer = NULL;
            }
        }
        ALOGV("Sensor image captured");
    }
    // No capture done, or finished image generation before readout was completed
    if (capturedBuffer != NULL) {
        ALOGV("Sensor readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        mCapturedBuffer = capturedBuffer;
        mCaptureTime = captureTime;
        mReadoutComplete.signal();
        capturedBuffer = NULL;
    }

    // Sleep out the remainder of the frame period to pace the loop at the
    // requested frame duration
    ALOGV("Sensor vertical blanking interval");
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime)  / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            // Resume the sleep if interrupted by a signal
            ret = nanosleep(&t, &t);
        } while (ret != 0);
    }
    nsecs_t endRealTime = systemTime();
    ALOGV("Frame cycle took %d ms, target %d ms",
            (int)((endRealTime - startRealTime)/1000000),
            (int)(frameDuration / 1000000));
    return true;
};
+
+} // namespace android
diff --git a/tools/emulator/system/camera/fake-pipeline2/Sensor.h b/tools/emulator/system/camera/fake-pipeline2/Sensor.h
new file mode 100644
index 0000000..565d10a
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/Sensor.h
@@ -0,0 +1,211 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This class is a simple simulation of a typical CMOS cellphone imager chip,
+ * which outputs 12-bit Bayer-mosaic raw images.
+ *
+ * The sensor is abstracted as operating as a pipeline 3 stages deep;
+ * conceptually, each frame to be captured goes through these three stages. The
+ * processing step for the sensor is marked off by vertical sync signals, which
+ * indicate the start of readout of the oldest frame. The interval between
+ * processing steps depends on the frame duration of the frame currently being
+ * captured. The stages are 1) configure, 2) capture, and 3) readout. During
+ * configuration, the sensor's registers for settings such as exposure time,
+ * frame duration, and gain are set for the next frame to be captured. In stage
+ * 2, the image data for the frame is actually captured by the sensor. Finally,
+ * in stage 3, the just-captured data is read out and sent to the rest of the
+ * system.
+ *
+ * The sensor is assumed to be rolling-shutter, so low-numbered rows of the
+ * sensor are exposed earlier in time than larger-numbered rows, with the time
+ * offset between each row being equal to the row readout time.
+ *
+ * The characteristics of this sensor don't correspond to any actual sensor,
+ * but are not far off typical sensors.
+ *
+ * Example timing diagram, with three frames:
+ * Frame 0-1: Frame duration 50 ms, exposure time 20 ms.
+ * Frame 2: Frame duration 75 ms, exposure time 65 ms.
+ * Legend:
+ * C = update sensor registers for frame
+ * v = row in reset (vertical blanking interval)
+ * E = row capturing image data
+ * R = row being read out
+ * | = vertical sync signal
+ *time(ms)| 0 55 105 155 230 270
+ * Frame 0| :configure : capture : readout : : :
+ * Row # | ..|CCCC______|_________|_________| : :
+ * 0 | :\ \vvvvvEEEER \ : :
+ * 500 | : \ \vvvvvEEEER \ : :
+ * 1000 | : \ \vvvvvEEEER \ : :
+ * 1500 | : \ \vvvvvEEEER \ : :
+ * 2000 | : \__________\vvvvvEEEER_________\ : :
+ * Frame 1| : configure capture readout : :
+ * Row # | : |CCCC_____|_________|______________| :
+ * 0 | : :\ \vvvvvEEEER \ :
+ * 500 | : : \ \vvvvvEEEER \ :
+ * 1000 | : : \ \vvvvvEEEER \ :
+ * 1500 | : : \ \vvvvvEEEER \ :
+ * 2000 | : : \_________\vvvvvEEEER______________\ :
+ * Frame 2| : : configure capture readout:
+ * Row # | : : |CCCC_____|______________|_______|...
+ * 0 | : : :\ \vEEEEEEEEEEEEER \
+ * 500 | : : : \ \vEEEEEEEEEEEEER \
+ * 1000 | : : : \ \vEEEEEEEEEEEEER \
+ * 1500 | : : : \ \vEEEEEEEEEEEEER \
+ * 2000 | : : : \_________\vEEEEEEEEEEEEER_______\
+ */
+
+#ifndef HW_EMULATOR_CAMERA2_SENSOR_H
+#define HW_EMULATOR_CAMERA2_SENSOR_H
+
+#include "utils/Thread.h"
+#include "utils/Mutex.h"
+#include "utils/Timers.h"
+
+#include "Scene.h"
+
+namespace android {
+
+class Sensor: private Thread, public virtual RefBase {
+ public:
+
+ Sensor();
+ ~Sensor();
+
+ /*
+ * Power control
+ */
+
+ // Start/stop the simulated sensor; startUp spins up the capture thread,
+ // shutDown joins it. NOTE(review): exact semantics live in Sensor.cpp —
+ // confirm startUp must be called before any control setters are used.
+ status_t startUp();
+ status_t shutDown();
+
+ /*
+ * Access to scene
+ */
+ // Returns the simulated world that the sensor images; used to adjust
+ // scene lighting/content between frames.
+ Scene &getScene();
+
+ /*
+ * Controls that can be updated every frame
+ */
+
+ // Exposure time for the next frame, in nanoseconds.
+ void setExposureTime(uint64_t ns);
+ // Total frame duration (exposure + blanking) for the next frame, in ns.
+ void setFrameDuration(uint64_t ns);
+ // Analog gain for the next frame, in ISO-style sensitivity units.
+ void setSensitivity(uint32_t gain);
+ // Buffer must be at least stride*height*2 bytes in size
+ // (16 bits per pixel of raw Bayer output).
+ void setDestinationBuffer(uint8_t *buffer, uint32_t stride);
+
+ /*
+ * Controls that cause reconfiguration delay
+ */
+
+ void setBinning(int horizontalFactor, int verticalFactor);
+
+ /*
+ * Synchronizing with sensor operation (vertical sync)
+ */
+
+ // Wait until the sensor outputs its next vertical sync signal, meaning it
+ // is starting readout of its latest frame of data. Returns true if vertical
+ // sync is signaled, false if the wait timed out.
+ bool waitForVSync(nsecs_t reltime);
+
+ // Wait until a new frame has been read out, and then return the time
+ // capture started. May return immediately if a new frame has been pushed
+ // since the last wait for a new frame. Returns true if new frame is
+ // returned, false if timed out.
+ bool waitForNewFrame(nsecs_t reltime,
+ nsecs_t *captureTime);
+
+ /**
+ * Static sensor characteristics
+ */
+ // Active pixel array size: {width, height}.
+ static const unsigned int kResolution[2];
+
+ // Supported ranges, in ns: {min, max}.
+ static const nsecs_t kExposureTimeRange[2];
+ static const nsecs_t kFrameDurationRange[2];
+ static const nsecs_t kMinVerticalBlank;
+
+ // Bayer color filter arrangement of the output mosaic.
+ static const uint8_t kColorFilterArrangement;
+
+ // Output image data characteristics
+ static const uint32_t kMaxRawValue;
+ static const uint32_t kBlackLevel;
+ // Approximate sensitivity model: well capacity, conversion gain, and
+ // noise sources used to simulate realistic exposure/gain response.
+
+ static const float kSaturationVoltage;
+ static const uint32_t kSaturationElectrons;
+ static const float kVoltsPerLuxSecond;
+ static const float kElectronsPerLuxSecond;
+
+ static const float kBaseGainFactor;
+
+ static const float kReadNoiseStddevBeforeGain; // In electrons
+ static const float kReadNoiseStddevAfterGain; // In raw digital units
+ static const float kReadNoiseVarBeforeGain;
+ static const float kReadNoiseVarAfterGain;
+
+ // While each row has to read out, reset, and then expose, the (reset +
+ // expose) sequence can be overlapped by other row readouts, so the final
+ // minimum frame duration is purely a function of row readout time, at least
+ // if there's a reasonable number of rows.
+ static const nsecs_t kRowReadoutTime;
+
+ // Discrete sensitivity (gain) settings the sensor reports as available,
+ // and the default used when none is requested.
+ static const uint32_t kAvailableSensitivities[5];
+ static const uint32_t kDefaultSensitivity;
+
+ private:
+
+ Mutex mControlMutex; // Lock before accessing control parameters
+ // Start of control parameters
+ Condition mVSync; // Signaled at the start of each frame readout
+ bool mGotVSync;
+ uint64_t mExposureTime; // ns
+ uint64_t mFrameDuration; // ns
+ uint32_t mGainFactor;
+ uint8_t *mNextBuffer; // Destination for the next captured frame
+ uint32_t mNextStride; // Row stride of mNextBuffer, in pixels
+ // End of control parameters
+
+ Mutex mReadoutMutex; // Lock before accessing readout variables
+ // Start of readout variables
+ Condition mReadoutComplete; // Signaled when a full frame has been read out
+ uint8_t *mCapturedBuffer; // Most recently completed frame, or NULL
+ nsecs_t mCaptureTime; // Start-of-capture timestamp for mCapturedBuffer
+ // End of readout variables
+
+ // Time of sensor startup, used for simulation zero-time point
+ nsecs_t mStartupTime;
+
+ /**
+ * Inherited Thread virtual overrides, and members only used by the
+ * processing thread
+ */
+ private:
+ virtual status_t readyToRun();
+
+ // One iteration per simulated frame: configure, capture, read out.
+ virtual bool threadLoop();
+
+ // Pipeline state owned exclusively by the processing thread.
+ nsecs_t mNextCaptureTime;
+ uint8_t *mNextCapturedBuffer;
+
+ // The simulated world being imaged.
+ Scene mScene;
+};
+
+}
+
+#endif // HW_EMULATOR_CAMERA2_SENSOR_H